summaryrefslogtreecommitdiff
path: root/tests/scripts/test-driver.sh
blob: 932339ae84e29323f52f0ad3abf11fc376b48f71 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
#!/bin/bash
#
# Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Abort the whole driver as soon as any command fails.
set -e
# NOTE: Supposed that this script would be executed with an artifact path.
#       The artifact path has tests/(test suite) and Product/
#       Reference this PR(https://github.sec.samsung.net/STAR/nnfw/pull/375).

# Print usage/help text for this driver to stdout.
# Fixes vs. original: "./$0" concatenated "./" onto the invocation path
# (printing e.g. ".././test-driver.sh"); typos "verfication"/"runnning".
function Usage()
{
    echo "Usage: $0 --artifactpath=.    # run all tests"
    echo "Usage: $0 --artifactpath=/home/dragon/nnfw --frameworktest --verification --benchmark    # run fw test & verification and benchmark"
    echo ""
    echo "--artifactpath            - (default={test-driver.sh's path}/../../) it should contain tests/ and Product/"
    echo ""
    echo "Following options are needed when you want to tests of specific types. If you don't pass any one, unittest and verification will be run"
    echo "--unittest                - (default=on) run unit test"
    echo "--frameworktest           - (default=off) run framework test"
    echo "--verification            - (default=on) run verification"
    echo "--frameworktest_list_file - filepath of model list for test"
    echo ""
    echo "Following option is only needed when you want to test benchmark."
    echo "--benchmark_neurun_op     - (default=off) run benchmark per operation on neurun"
    echo ""
    echo "etc."
    echo "--framework_driverbin     - (default=../../Product/out/bin/tflite_run) runner for running framework tests"
    echo "--verification_driverbin  - (default=../../Product/out/bin/nnapi_test) runner for running verification tests"
    echo "--benchmark_driverbin     - (default=../../Product/out/bin/tflite_benchmark) runner for running benchmark"
    echo "--runtestsh               - (default=\$ARTIFACT_PATH/tests/framework/run_test.sh) run_test.sh with path where it is for framework test and verification"
    echo "--unittestdir             - (default=\$ARTIFACT_PATH/Product/out/unittest) directory that has unittest binaries for unit test"
    echo ""
    echo "--reportdir               - (default=\$ARTIFACT_PATH/report) directory to save report"
    echo ""
}

# Absolute directory containing this script; used to locate the sibling
# helper scripts (common.sh, unittest.sh, test_framework.sh, ...).
TEST_DRIVER_DIR="$( cd "$( dirname "${BASH_SOURCE}" )" && pwd )"
# Default artifact path: two levels above this script (expected to contain
# tests/ and Product/); may be overridden by --artifactpath.
ARTIFACT_PATH="$TEST_DRIVER_DIR/../../"
# Driver binaries; empty means "use the default under the artifact path",
# which is filled in later right before each test kind runs.
FRAMEWORK_DRIVER_BIN=""
VERIFICATION_DRIVER_BIN=""
BENCHMARK_DRIVER_BIN=""
RUN_TEST_SH=""
UNIT_TEST_DIR=""
# Test-selection flags. ALLTEST_ON stays "true" only while no specific
# test-type option is passed; any such option below turns it off.
ALLTEST_ON="true"
UNITTEST_ON="false"
FRAMEWORKTEST_ON="false"
VERIFICATION_ON="false"
BENCHMARK_NEURUN_OP_ON="false"
REPORT_DIR=""

# Parse command-line arguments. Every option is matched by pattern; any
# unrecognized argument falls through to the last arm and is treated as
# the artifact path.
for i in "$@"
do
    case "$i" in
        -h|--help|help)
            Usage
            exit 1
            ;;
        --artifactpath=*)
            ARTIFACT_PATH=${i#*=}
            ;;
        --framework_driverbin=*)
            FRAMEWORK_DRIVER_BIN=${i#*=}
            ;;
        --verification_driverbin=*)
            VERIFICATION_DRIVER_BIN=${i#*=}
            ;;
        --benchmark_driverbin=*)
            BENCHMARK_DRIVER_BIN=${i#*=}
            ;;
        --runtestsh=*)
            RUN_TEST_SH=${i#*=}
            ;;
        --unittestdir=*)
            UNIT_TEST_DIR=${i#*=}
            ;;
        --unittest)
            ALLTEST_ON="false"
            UNITTEST_ON="true"
            ;;
        --frameworktest)
            ALLTEST_ON="false"
            FRAMEWORKTEST_ON="true"
            ;;
        --frameworktest_list_file=*)
            FRAMEWORKTEST_LIST_FILE=${i#*=}
            # Resolve relative paths against the caller's cwd. The original
            # prepended $PWD unconditionally, which mangled absolute paths.
            if [[ "$FRAMEWORKTEST_LIST_FILE" != /* ]]; then
                FRAMEWORKTEST_LIST_FILE=$PWD/$FRAMEWORKTEST_LIST_FILE
            fi
            if [ ! -e "$FRAMEWORKTEST_LIST_FILE" ]; then
                echo "Pass on with proper frameworktest_list_file"
                exit 1
            fi
            ;;
        --verification)
            ALLTEST_ON="false"
            VERIFICATION_ON="true"
            ;;
        --benchmark_neurun_op)
            ALLTEST_ON="false"
            BENCHMARK_NEURUN_OP_ON="true"
            ;;
        --reportdir=*)
            REPORT_DIR=${i#*=}
            ;;
        *)
            # Be careful that others params are handled as $ARTIFACT_PATH
            ARTIFACT_PATH="$i"
            ;;
    esac
    # NOTE(review): the original ended each iteration with 'shift', which is
    # a no-op here ("$@" is expanded once before the loop starts) and nothing
    # after the loop reads the positional parameters, so it was dropped.
done

# Canonicalize the artifact path. The expansion is quoted so paths
# containing spaces survive (the original passed it unquoted).
ARTIFACT_PATH="$(readlink -f "$ARTIFACT_PATH")"

# Default run_test.sh lives inside the artifact's framework test suite.
if [ -z "$RUN_TEST_SH" ]; then
    RUN_TEST_SH=$ARTIFACT_PATH/tests/framework/run_test.sh
fi

# run_test.sh is mandatory for framework/verification tests; bail out early.
if [ ! -e "$RUN_TEST_SH" ]; then
    echo "Cannot find $RUN_TEST_SH"
    exit 1
fi

if [ -z "$UNIT_TEST_DIR" ]; then
    UNIT_TEST_DIR=$ARTIFACT_PATH/Product/out/unittest
fi

if [ -z "$REPORT_DIR" ]; then
    REPORT_DIR=$ARTIFACT_PATH/report
fi

# Shared helper functions used by the test runs below.
source "$TEST_DRIVER_DIR/common.sh"

# Run unittest in each part such as Runtime.
# Expansions are quoted so directory paths with spaces do not word-split.
if [ "$ALLTEST_ON" == "true" ] || [ "$UNITTEST_ON" == "true" ]; then
    "$TEST_DRIVER_DIR/unittest.sh" \
        --reportdir="$REPORT_DIR" \
        --unittestdir="$UNIT_TEST_DIR"
fi

# Run tflite_run with various tflite models.
# Expansions are quoted so paths with spaces do not word-split.
if [ "$FRAMEWORKTEST_ON" == "true" ]; then
    # Fall back to the default runner shipped in the artifact.
    if [ -z "$FRAMEWORK_DRIVER_BIN" ]; then
        FRAMEWORK_DRIVER_BIN=$ARTIFACT_PATH/Product/out/bin/tflite_run
    fi

    "$TEST_DRIVER_DIR/test_framework.sh" \
        --runtestsh="$RUN_TEST_SH" \
        --driverbin="$FRAMEWORK_DRIVER_BIN" \
        --reportdir="$REPORT_DIR" \
        --tapname=framework_test.tap \
        --logname=framework_test.log \
        --testname="Frameworktest" \
        --frameworktest_list_file="${FRAMEWORKTEST_LIST_FILE:-}"
fi

# Run nnapi_test with various tflite models.
# Expansions are quoted so paths with spaces do not word-split.
if [ "$ALLTEST_ON" == "true" ] || [ "$VERIFICATION_ON" == "true" ]; then
    # Fall back to the default runner shipped in the artifact.
    if [ -z "$VERIFICATION_DRIVER_BIN" ]; then
        VERIFICATION_DRIVER_BIN=$ARTIFACT_PATH/Product/out/bin/nnapi_test
    fi

    # verification uses the same script as frameworktest does
    "$TEST_DRIVER_DIR/test_framework.sh" \
        --runtestsh="$RUN_TEST_SH" \
        --driverbin="$VERIFICATION_DRIVER_BIN" \
        --reportdir="$REPORT_DIR" \
        --tapname=verification_test.tap \
        --logname=verification_test.log \
        --testname="Verification" \
        --frameworktest_list_file="${FRAMEWORKTEST_LIST_FILE:-}"
fi

# Run per-operation benchmark on neurun.
# Expansions are quoted so paths with spaces do not word-split.
if [ "$BENCHMARK_NEURUN_OP_ON" == "true" ]; then
    # Use the caller-supplied benchmark runner, or the artifact default.
    if [ -z "$BENCHMARK_DRIVER_BIN" ]; then
        DRIVER_BIN=$ARTIFACT_PATH/Product/out/bin/tflite_benchmark
    else
        DRIVER_BIN=$BENCHMARK_DRIVER_BIN
    fi

    "$TEST_DRIVER_DIR/benchmark_nnapi.sh" \
        --test_op \
        --runtestsh="$RUN_TEST_SH" \
        --driverbin="$DRIVER_BIN" \
        --reportdir="$REPORT_DIR/benchmark_op" \
        --modelfilepath="$ARTIFACT_PATH/tests/framework"
fi

# Make json file. Actually, this process is only needed on CI. That's why it is in test-driver.sh.
if [ "$BENCHMARK_NEURUN_OP_ON" == "true" ]; then
    # functions to fill json with benchmark results
    source "$ARTIFACT_PATH/tests/scripts/print_to_json.sh"
    # NOTE(review): the original re-tested $BENCHMARK_NEURUN_OP_ON here with
    # an else branch producing benchmark_result.json; since the inner
    # condition was identical to the outer one, that branch was dead code
    # and has been removed.
    print_to_json "$REPORT_DIR/benchmark_op" "$REPORT_DIR" "benchmark_op_result.json"
fi