summaryrefslogtreecommitdiff
path: root/tools/test_driver/test_driver.sh
diff options
context:
space:
mode:
Diffstat (limited to 'tools/test_driver/test_driver.sh')
-rwxr-xr-xtools/test_driver/test_driver.sh372
1 file changed, 142 insertions, 230 deletions
diff --git a/tools/test_driver/test_driver.sh b/tools/test_driver/test_driver.sh
index 3bff910f5..72be65dc1 100755
--- a/tools/test_driver/test_driver.sh
+++ b/tools/test_driver/test_driver.sh
@@ -24,32 +24,41 @@ function Usage()
echo "Usage: ./$0 --artifactpath=. # run all tests"
echo "Usage: ./$0 --artifactpath=/home/dragon/nnfw --frameworktest --verification --benchmark # run fw test & verfication and benchmark"
echo ""
- echo "--artifactpath - (should be passed) path that has tests/ and Product/"
+ echo "--artifactpath - (default={test_driver.sh's path}/../../) it should contain tests/ and Product/"
echo ""
- echo "Following three options are needed when you want to tests of specific types. If you don't pass any one, unittest and verification will be run"
- echo "--unittest - (default=on) run unit test"
- echo "--frameworktest - (default=off)run framework test"
- echo "--verification - (default=on) run verification"
+ echo "Following options are needed when you want to run tests of specific types. If you don't pass any one, unittest and verification will be run"
+ echo "--unittest - (default=on) run unit test"
+ echo "--unittestall - (default=off) run all unit tests without skip, overrides --unittest option"
+ echo "--frameworktest - (default=off) run framework test"
+ echo "--verification - (default=on) run verification"
+ echo "--frameworktest_list_file - filepath of model list for test"
echo ""
echo "Following option is only needed when you want to test benchmark."
- echo "--benchmark - (default=off) run benchmark"
- echo "--benchmark_acl - (default=off) run benchmark-acl"
+ echo "--benchmark_acl - (default=off) run benchmark-acl"
+ echo "--benchmark - (default=off) run benchmark"
+ echo "--benchmark_op - (default=off) run benchmark per operation"
+ echo "--benchmark_tflite_model - (default=off) run tflite_benchmark_model"
+ echo ""
+ echo "Following option is used for profiling."
+ echo "--profile - (default=off) run operf"
echo ""
echo "etc."
- echo "--framework_driverbin - (default=../../Product/out/bin/tflite_run) runner for runnning framework tests"
- echo "--verification_driverbin - (default=../../Product/out/bin/nnapi_test) runner for runnning verification tests"
- echo "--benchmark_driverbin - (default=../../Product/out/bin/tflite_benchmark) runner for runnning benchmark"
- echo "--runtestsh - (default=\$ARTIFACT_PATH/tests/framework/run_test.sh) run_test.sh with path where it is for framework test and verification"
- echo "--unittestdir - (default=\$ARTIFACT_PATH/Product/out/unittest) directory that has unittest binaries for unit test"
+ echo "--framework_driverbin - (default=../../Product/out/bin/tflite_run) runner for running framework tests"
+ echo "--verification_driverbin - (default=../../Product/out/bin/nnapi_test) runner for running verification tests"
+ echo "--benchmark_driverbin - (default=../../Product/out/bin/tflite_benchmark) runner for running benchmark"
+ echo "--runtestsh - (default=\$ARTIFACT_PATH/tests/framework/run_test.sh) run_test.sh with path where it is for framework test and verification"
+ echo "--unittestdir - (default=\$ARTIFACT_PATH/Product/out/unittest) directory that has unittest binaries for unit test"
echo ""
- echo "--ldlibrarypath - (default=\$ARTIFACT_PATH/Product/out/lib) path that you want to include libraries"
- echo "--usennapi - (default=on) declare USE_NNAPI=1"
- echo "--nousennapi - (default=off) declare nothing about USE_NNAPI"
- echo "--acl_envon - (default=off) declare envs for ACL"
+ echo "--ldlibrarypath - (default=\$ARTIFACT_PATH/Product/out/lib) path that you want to include libraries"
+ echo "--usennapi - (default=on) declare USE_NNAPI=1"
+ echo "--nousennapi - (default=off) declare nothing about USE_NNAPI"
+ echo "--acl_envon - (default=off) declare envs for ACL"
+ echo "--reportdir - (default=\$ARTIFACT_PATH/report) directory to save report"
echo ""
}
-ARTIFACT_PATH=""
+TEST_DRIVER_DIR="$( cd "$( dirname "${BASH_SOURCE}" )" && pwd )"
+ARTIFACT_PATH="$TEST_DRIVER_DIR/../../"
FRAMEWORK_DRIVER_BIN=""
VERIFICATION_DRIVER_BIN=""
BENCHMARK_DRIVER_BIN=""
@@ -59,12 +68,16 @@ LD_LIBRARY_PATH_IN_SHELL=""
USE_NNAPI="USE_NNAPI=1"
ALLTEST_ON="true"
UNITTEST_ON="false"
+UNITTESTALL_ON="false"
FRAMEWORKTEST_ON="false"
VERIFICATION_ON="false"
BENCHMARK_ON="false"
+BENCHMARK_OP_ON="false"
+BENCHMARK_TFLITE_MODEL_ON="false"
BENCHMARK_ACL_ON="false"
-MODEL_LIST="inceptionv3/inception_nonslim inceptionv3/inception_slim"
ACL_ENV_ON="false"
+PROFILE_ON="false"
+REPORT_DIR=""
for i in "$@"
do
@@ -104,10 +117,22 @@ do
ALLTEST_ON="false"
UNITTEST_ON="true"
;;
+ --unittestall)
+ ALLTEST_ON="false"
+ UNITTEST_ON="true"
+ UNITTESTALL_ON="true"
+ ;;
--frameworktest)
ALLTEST_ON="false"
FRAMEWORKTEST_ON="true"
;;
+ --frameworktest_list_file=*)
+ FRAMEWORKTEST_LIST_FILE=$PWD/${i#*=}
+ if [ ! -e "$FRAMEWORKTEST_LIST_FILE" ]; then
+ echo "Pass on with proper frameworktest_list_file"
+ exit 1
+ fi
+ ;;
--verification)
ALLTEST_ON="false"
VERIFICATION_ON="true"
@@ -116,6 +141,14 @@ do
ALLTEST_ON="false"
BENCHMARK_ON="true"
;;
+ --benchmark_op)
+ ALLTEST_ON="false"
+ BENCHMARK_OP_ON="true"
+ ;;
+ --benchmark_tflite_model)
+ ALLTEST_ON="false"
+ BENCHMARK_TFLITE_MODEL_ON="true"
+ ;;
--benchmark_acl)
ALLTEST_ON="false"
BENCHMARK_ACL_ON="true"
@@ -123,6 +156,13 @@ do
--acl_envon)
ACL_ENV_ON="true"
;;
+ --profile)
+ ALLTEST_ON="false"
+ PROFILE_ON="true"
+ ;;
+ --reportdir=*)
+ REPORT_DIR=${i#*=}
+ ;;
*)
# Be careful that others params are handled as $ARTIFACT_PATH
ARTIFACT_PATH="$i"
@@ -131,10 +171,6 @@ do
shift
done
-if [ ! -e "$ARTIFACT_PATH" ]; then
- echo "Pass on with proper ARTIFACT_PATH"
- exit 1
-fi
ARTIFACT_PATH="$(readlink -f $ARTIFACT_PATH)"
if [ -z "$RUN_TEST_SH" ]; then
@@ -150,6 +186,10 @@ if [ -z "$UNIT_TEST_DIR" ]; then
UNIT_TEST_DIR=$ARTIFACT_PATH/Product/out/unittest
fi
+if [ -z "$REPORT_DIR" ]; then
+ REPORT_DIR=$ARTIFACT_PATH/report
+fi
+
if [ -z "$LD_LIBRARY_PATH_IN_SHELL" ]; then
LD_LIBRARY_PATH="$ARTIFACT_PATH/Product/out/lib:$LD_LIBRARY_PATH"
else
@@ -162,20 +202,7 @@ if [ -n "$USE_NNAPI" ]; then
export "$USE_NNAPI"
fi
-function switch_nnfw_kernel_env()
-{
- local switch=$1 # "ON" or "OFF"
- local mode=$2 # "acl" or "neon" or ""
- local NNFW_KERNEL_ENV_FILE=$ARTIFACT_PATH/tools/test_driver/nnfw_kernel_env_list.txt
-
- for ENV in $(cat $NNFW_KERNEL_ENV_FILE); do
- if [[ "$switch" == "ON" ]]; then
- export "$ENV=$mode"
- else
- unset "$ENV"
- fi
- done
-}
+source $TEST_DRIVER_DIR/common.sh
if [ "$ACL_ENV_ON" == "true" ]; then
switch_nnfw_kernel_env "ON" "acl"
@@ -183,30 +210,16 @@ fi
# Run unittest in each part such as Runtime, ACL
if [ "$ALLTEST_ON" == "true" ] || [ "$UNITTEST_ON" == "true" ]; then
- if [ ! -e "$ARTIFACT_PATH/report" ]; then
- mkdir -p $ARTIFACT_PATH/report
+ if [ "$UNITTESTALL_ON" == "true" ]; then
+ $TEST_DRIVER_DIR/run_unittest.sh \
+ --reportdir=$REPORT_DIR \
+ --unittestdir=$UNIT_TEST_DIR \
+ --runall
+ else
+ $TEST_DRIVER_DIR/run_unittest.sh \
+ --reportdir=$REPORT_DIR \
+ --unittestdir=$UNIT_TEST_DIR
fi
-
- echo ""
- echo "============================================"
- echo "Unittest start"
- echo "============================================"
-
- num_unittest=0
- for TEST_BIN in `ls $UNIT_TEST_DIR`; do
- num_unittest=$((num_unittest+1))
- echo "============================================"
- echo "Starting set $num_unittest: $TEST_BIN..."
- echo "============================================"
- $UNIT_TEST_DIR/$TEST_BIN --gtest_output=xml:$ARTIFACT_PATH/report/$TEST_BIN.xml
- echo "============================================"
- echo "Finishing set $num_unittest: $TEST_BIN..."
- echo "============================================"
- done
- echo "============================================"
- echo "Completed total $num_unittest set of unittest"
- echo "Unittest end"
- echo "============================================"
fi
# Run tflite_run with various tflite models
@@ -215,21 +228,14 @@ if [ "$FRAMEWORKTEST_ON" == "true" ]; then
FRAMEWORK_DRIVER_BIN=$ARTIFACT_PATH/Product/out/bin/tflite_run
fi
- if [ ! -e "$ARTIFACT_PATH/report" ]; then
- mkdir -p $ARTIFACT_PATH/report
- fi
-
- echo ""
- echo "============================================"
- echo "Framework Test with tflite_run..."
- $RUN_TEST_SH --driverbin=$FRAMEWORK_DRIVER_BIN \
- --reportdir=$ARTIFACT_PATH/report \
+ $TEST_DRIVER_DIR/run_frameworktest.sh \
+ --runtestsh=$RUN_TEST_SH \
+ --driverbin=$FRAMEWORK_DRIVER_BIN \
+ --reportdir=$REPORT_DIR \
--tapname=framework_test.tap \
- > $ARTIFACT_PATH/report/framework_test.log 2>&1
- echo "============================================"
- cat $ARTIFACT_PATH/report/framework_test.tap
- echo "============================================"
- echo ""
+ --logname=framework_test.log \
+ --testname="Frameworktest" \
+ --frameworktest_list_file=${FRAMEWORKTEST_LIST_FILE:-}
fi
# Run nnapi_test with various tflite models
@@ -238,180 +244,86 @@ if [ "$ALLTEST_ON" == "true" ] || [ "$VERIFICATION_ON" == "true" ]; then
VERIFICATION_DRIVER_BIN=$ARTIFACT_PATH/Product/out/bin/nnapi_test
fi
- if [ ! -e "$ARTIFACT_PATH/report" ]; then
- mkdir -p $ARTIFACT_PATH/report
- fi
-
- echo ""
- echo "============================================"
- echo "Verification with nnapi_test..."
- $RUN_TEST_SH --driverbin=$VERIFICATION_DRIVER_BIN \
- --reportdir=$ARTIFACT_PATH/report \
- --tapname=verification.tap \
- > $ARTIFACT_PATH/report/verification.log 2>&1
- echo "============================================"
- cat $ARTIFACT_PATH/report/verification.tap
- echo "============================================"
- echo ""
+ # verification uses the same script as frameworktest does
+ $TEST_DRIVER_DIR/run_frameworktest.sh \
+ --runtestsh=$RUN_TEST_SH \
+ --driverbin=$VERIFICATION_DRIVER_BIN \
+ --reportdir=$REPORT_DIR \
+ --tapname=verification_test.tap \
+ --logname=verification_test.log \
+ --testname="Verification" \
+ --frameworktest_list_file=${FRAMEWORKTEST_LIST_FILE:-}
fi
-# Benchmark test
-function get_result_of_benchmark_test()
-{
- local RUN_TEST_SH=$1
- local DRIVER_BIN=$2
- local MODEL=$3
- local LOG_FILE=$4
-
- $RUN_TEST_SH --driverbin=$DRIVER_BIN $MODEL > $LOG_FILE 2>&1
-
- local RESULT=`grep -E '^Mean:' $LOG_FILE | sed -e 's/ms//g' | awk '{print $2}'`
- echo "$RESULT"
-}
-
-function print_result_of_benchmark_test()
-{
- local NAME=$1
- local RESULT=$2
- local RESULT_FILE=$3
-
- echo "$NAME $RESULT" > $RESULT_FILE
-}
-
-function run_benchmark_test()
-{
- local DRIVER_BIN=
- local LOG_FILE=
- local RESULT_FILE=
- local RESULT=
- local REPORT_MODEL_DIR=
-
+# Run tflite_benchmark with tflite models
+if [ "$BENCHMARK_ON" == "true" ]; then
if [ -z "$BENCHMARK_DRIVER_BIN" ]; then
DRIVER_BIN=$ARTIFACT_PATH/Product/out/bin/tflite_benchmark
else
DRIVER_BIN=$BENCHMARK_DRIVER_BIN
fi
- export COUNT=5
- echo "============================================"
- local i=0
- for MODEL in $MODEL_LIST; do
- echo "Benchmark test with tflite_benchmark & `echo $MODEL`"
- echo $MODEL >> $MODELS_FILE
-
- REPORT_MODEL_DIR=$ARTIFACT_PATH/report/benchmark/$MODEL
- mkdir -p $REPORT_MODEL_DIR
-
- # TFLite+CPU
- LOG_FILE=$REPORT_MODEL_DIR/tflite_cpu.txt
- RESULT_FILE=$REPORT_MODEL_DIR/tflite_cpu.result
- echo -n "TFLite + CPU................... "
- unset USE_NNAPI
- RESULT=$(get_result_of_benchmark_test $RUN_TEST_SH $DRIVER_BIN $MODEL $LOG_FILE)
- echo "$RESULT ms"
- print_result_of_benchmark_test "TFLite_CPU" $RESULT $RESULT_FILE
-
-
- # TFLite+NNAPI(CPU fallback)
- LOG_FILE=$REPORT_MODEL_DIR/tflite_nnapi_cpu.txt
- RESULT_FILE=$REPORT_MODEL_DIR/tflite_nnapi_cpu.result
- echo -n "TFLite + NNAPI(CPU)............ "
- export USE_NNAPI=1
- RESULT=$(get_result_of_benchmark_test $RUN_TEST_SH $DRIVER_BIN $MODEL $LOG_FILE)
- echo "$RESULT ms"
- print_result_of_benchmark_test "TFLite_NNAPI_CPU" $RESULT $RESULT_FILE
-
- # TFLite+NNAPI(ACL)
- LOG_FILE=$REPORT_MODEL_DIR/tflite_nnapi_acl.txt
- RESULT_FILE=$REPORT_MODEL_DIR/tflite_nnapi_acl.result
- echo -n "TFLite + NNAPI(ACL)............ "
- switch_nnfw_kernel_env "ON" "acl"
- RESULT=$(get_result_of_benchmark_test $RUN_TEST_SH $DRIVER_BIN $MODEL $LOG_FILE)
- echo "$RESULT ms"
- print_result_of_benchmark_test "TFLite_NNAPI_ACL" $RESULT $RESULT_FILE
- unset USE_NNAPI
- switch_nnfw_kernel_env "OFF"
-
- if [[ $i -ne $(echo $MODEL_LIST | wc -w)-1 ]]; then
- echo ""
- fi
- i=$((i+1))
- done
- echo "============================================"
- unset COUNT
-}
-
-function run_benchmark_acl()
-{
- local REPORT_DIR=$ARTIFACT_PATH/report/benchmark
- local DRIVER_DIR=$ARTIFACT_PATH/Product/out/bin
- local LOG_FILE=""
- local RESULT_FILE=""
- local RESULT=""
-
- export COUNT=5
- echo "============================================"
- local i=0
- for BENCHMARK_ACL_BIN in $(ls $DRIVER_DIR/benchmark_*); do
- BENCHMARK_ACL_BIN_BASENAME=$(basename $BENCHMARK_ACL_BIN)
- mkdir -p $REPORT_DIR/$BENCHMARK_ACL_BIN_BASENAME
- echo "Benchmark/acl test by $BENCHMARK_ACL_BIN_BASENAME"
- echo $BENCHMARK_ACL_BIN_BASENAME >> $MODELS_FILE
+ $TEST_DRIVER_DIR/run_benchmark.sh \
+ --runtestsh=$RUN_TEST_SH \
+ --driverbin=$DRIVER_BIN \
+ --reportdir=$REPORT_DIR/benchmark
+fi
- # ACL(NEON)
- LOG_FILE=$REPORT_DIR/$BENCHMARK_ACL_BIN_BASENAME/acl_neon.txt
- RESULT_FILE=$REPORT_DIR/$BENCHMARK_ACL_BIN_BASENAME/acl_neon.result
- echo -n "ACL(NEON)...... "
- $BENCHMARK_ACL_BIN 0 > $LOG_FILE 2>&1
- RESULT=`grep -E '^Mean:' $LOG_FILE | sed -e 's/ms//g' | awk '{print $2}'`
- echo "$RESULT ms"
- echo "ACL(NEON)" $RESULT > $RESULT_FILE
+# Run tflite_benchmark from a list of tflite models.
+# Each model has only one operator.
+if [ "$BENCHMARK_OP_ON" == "true" ]; then
+ if [ -z "$BENCHMARK_DRIVER_BIN" ]; then
+ DRIVER_BIN=$ARTIFACT_PATH/Product/out/bin/tflite_benchmark
+ else
+ DRIVER_BIN=$BENCHMARK_DRIVER_BIN
+ fi
- # ACL(OpenCL)
- LOG_FILE=$REPORT_DIR/$BENCHMARK_ACL_BIN_BASENAME/acl_opencl.txt
- RESULT_FILE=$REPORT_DIR/$BENCHMARK_ACL_BIN_BASENAME/acl_opencl.result
- echo -n "ACL(OpenCL).... "
- $BENCHMARK_ACL_BIN 1 > $LOG_FILE 2>&1
- RESULT=`grep -E '^Mean:' $LOG_FILE | sed -e 's/ms//g' | awk '{print $2}'`
- echo "$RESULT ms"
- echo "ACL(OpenCL)" $RESULT > $RESULT_FILE
+ $TEST_DRIVER_DIR/run_benchmark_op.sh \
+ --runtestsh=$RUN_TEST_SH \
+ --driverbin=$DRIVER_BIN \
+ --reportdir=$REPORT_DIR/benchmark_op \
+ --modelfilepath=$ARTIFACT_PATH/tests/framework \
+ --frameworktest_list_file=${FRAMEWORKTEST_LIST_FILE:-}
+fi
- if [[ $i -ne $(ls $DRIVER_DIR/benchmark_* | wc -w)-1 ]]; then
- echo ""
- fi
- i=$((i+1))
- done
- echo "============================================"
- unset COUNT
-}
+# Run benchmark/acl/benchmark_googlenet, mobilenet and inception_v3
+if [ "$BENCHMARK_ACL_ON" == "true" ]; then
+ $TEST_DRIVER_DIR/run_benchmark_acl.sh \
+ --reportdir=$REPORT_DIR/benchmark \
+ --bindir=$ARTIFACT_PATH/Product/out/bin
+fi
-if [ "$BENCHMARK_ON" == "true" ] || [ "$BENCHMARK_ACL_ON" == "true" ]; then
- if [ ! -e "$ARTIFACT_PATH/report" ]; then
- mkdir -p $ARTIFACT_PATH/report
+# Make json file. Actually, this process is only needed on CI. That's why it is in test_driver.sh.
+if [ "$BENCHMARK_ON" == "true" ] || [ "$BENCHMARK_ACL_ON" == "true" ] || [ "$BENCHMARK_OP_ON" == "true" ]; then
+ # functions to fill json with benchmark results
+ source $ARTIFACT_PATH/tools/test_driver/print_to_json.sh
+ if [ "$BENCHMARK_OP_ON" == "true" ]; then
+ print_to_json $REPORT_DIR/benchmark_op $REPORT_DIR "benchmark_op_result.json"
+ else
+ print_to_json $REPORT_DIR/benchmark $REPORT_DIR "benchmark_result.json"
fi
- REPORT_MODEL_DIR=$ARTIFACT_PATH/report/benchmark
- mkdir -p $REPORT_MODEL_DIR
- MODELS_FILE=$ARTIFACT_PATH/report/benchmark/benchmark_models.txt
- rm -f $MODELS_FILE
fi
-# Run tflite_benchmark with Iv3_non_slim & Iv3_slim
-if [ "$BENCHMARK_ON" == "true" ]; then
- echo ""
- run_benchmark_test
- echo ""
+# Run tflite_benchmark_model (= per-operation profiling tool).
+# Each model can contain arbitrary number of operators.
+if [ "$BENCHMARK_TFLITE_MODEL_ON" == "true" ]; then
+ $TEST_DRIVER_DIR/run_benchmark_tflite_model.sh \
+ --reportdir=$REPORT_DIR/benchmark_tflite_model \
+ --modelroot=$ARTIFACT_PATH/tests/framework/tests
fi
-# Run benchmark/acl/benchmark_googlenet,mobilenet and inception_v3
-if [ "$BENCHMARK_ACL_ON" == "true" ]; then
+# Run profiling
+if [ "$PROFILE_ON" == "true" ]; then
+ # FIXME: These driver and tflite test are set temporarily. Fix these to support flexibility
+ DRIVER_BIN=$ARTIFACT_PATH/Product/out/bin/tflite_run
+ TFLITE_TEST=$ARTIFACT_PATH/tests/framework/cache/inceptionv3/inception_module/inception_test.tflite
+
+ # TODO: Enable operf to set directory where sample data puts on
+ rm -rf oprofile_data
+
echo ""
- run_benchmark_acl
+ echo "============================================"
+ operf -g $DRIVER_BIN $TFLITE_TEST
+ echo "============================================"
echo ""
fi
-
-# make json file
-if [ "$BENCHMARK_ON" == "true" ] || [ "$BENCHMARK_ACL_ON" == "true" ]; then
- # functions to fill json with benchmark results
- source $ARTIFACT_PATH/tools/test_driver/print_to_json.sh
- print_to_json
-fi