forked from mlcommons/inference
-
Notifications
You must be signed in to change notification settings - Fork 0
/
run_common.sh
executable file
·115 lines (104 loc) · 2.91 KB
/
run_common.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
#!/bin/bash
# Common setup for MLPerf inference runs: validates the invocation and the
# required environment, then (below) resolves backend/model/device choices.
# Callers must export DATA_DIR (dataset root) and MODEL_DIR (model files).
if [ $# -lt 1 ]; then
    echo "usage: $0 tf|onnxruntime|pytorch|tflite [resnet50|mobilenet|ssd-mobilenet|ssd-resnet34] [cpu|gpu]"
    exit 1
fi
# -z replaces the archaic 'x$VAR' comparison; quoting makes it safe for
# empty/unset values.  Diagnostics go to stderr so stdout stays clean.
if [ -z "$DATA_DIR" ]; then
    echo "DATA_DIR not set" >&2
    exit 1
fi
if [ -z "$MODEL_DIR" ]; then
    echo "MODEL_DIR not set" >&2
    exit 1
fi
# defaults
backend=tf
model=resnet50
device="cpu"

# Scan the positional arguments for a backend, model and device token.
# "$@" (not $*) so arguments are not re-split on whitespace; each
# recognized token is shifted off so the caller can use what remains.
# NOTE: the redundant, unreachable 'gpu)' arm from the original was
# removed — 'cpu|gpu)' above it already matches gpu.
for i in "$@"; do
    case "$i" in
        tf|onnxruntime|tflite|pytorch) backend=$i; shift ;;
        cpu|gpu) device=$i; shift ;;
        resnet50|mobilenet|ssd-mobilenet|ssd-resnet34|ssd-resnet34-tf) model=$i; shift ;;
    esac
done
# Hide all GPUs from CUDA-based frameworks when running on CPU.
# Quoted operand (the original's unquoted $device makes '[' error out
# when the variable is empty) and POSIX '=' instead of '=='.
if [ "$device" = "cpu" ]; then
    export CUDA_VISIBLE_DEVICES=""
fi
# Map "<model>-<backend>" to the model file and the mlperf profile to use;
# some combinations also need extra flags passed through to the harness.
name="$model-$backend"
extra_args=""
case "$name" in
    # tensorflow
    resnet50-tf)
        model_path="$MODEL_DIR/resnet50_v1.pb"
        profile=resnet50-tf
        ;;
    mobilenet-tf)
        model_path="$MODEL_DIR/mobilenet_v1_1.0_224_frozen.pb"
        profile=mobilenet-tf
        ;;
    ssd-mobilenet-tf)
        model_path="$MODEL_DIR/ssd_mobilenet_v1_coco_2018_01_28.pb"
        profile=ssd-mobilenet-tf
        ;;
    ssd-resnet34-tf)
        model_path="$MODEL_DIR/resnet34_tf.22.1.pb"
        profile=ssd-resnet34-tf
        ;;
    # onnxruntime
    resnet50-onnxruntime)
        model_path="$MODEL_DIR/resnet50_v1.onnx"
        profile=resnet50-onnxruntime
        ;;
    mobilenet-onnxruntime)
        model_path="$MODEL_DIR/mobilenet_v1_1.0_224.onnx"
        profile=mobilenet-onnxruntime
        ;;
    ssd-mobilenet-onnxruntime)
        model_path="$MODEL_DIR/ssd_mobilenet_v1_coco_2018_01_28.onnx"
        profile=ssd-mobilenet-onnxruntime
        ;;
    ssd-resnet34-onnxruntime)
        # use onnx model converted from pytorch
        model_path="$MODEL_DIR/resnet34-ssd1200.onnx"
        profile=ssd-resnet34-onnxruntime
        ;;
    ssd-resnet34-tf-onnxruntime)
        # use onnx model converted from tensorflow
        model_path="$MODEL_DIR/ssd_resnet34_mAP_20.2.onnx"
        profile=ssd-resnet34-onnxruntime-tf
        ;;
    # pytorch: resnet50/mobilenet reuse the onnx model via the pytorch backend
    resnet50-pytorch)
        model_path="$MODEL_DIR/resnet50_v1.onnx"
        profile=resnet50-onnxruntime
        extra_args="$extra_args --backend pytorch"
        ;;
    mobilenet-pytorch)
        model_path="$MODEL_DIR/mobilenet_v1_1.0_224.onnx"
        profile=mobilenet-onnxruntime
        extra_args="$extra_args --backend pytorch"
        ;;
    ssd-resnet34-pytorch)
        model_path="$MODEL_DIR/resnet34-ssd1200.pytorch"
        profile=ssd-resnet34-pytorch
        ;;
    # tflite: reuses the tf profiles with the tflite backend
    resnet50-tflite)
        model_path="$MODEL_DIR/resnet50_v1.tflite"
        profile=resnet50-tf
        extra_args="$extra_args --backend tflite"
        ;;
    mobilenet-tflite)
        model_path="$MODEL_DIR/mobilenet_v1_1.0_224.tflite"
        profile=mobilenet-tf
        extra_args="$extra_args --backend tflite"
        ;;
esac

# Results/output directory component and accumulated options for the caller.
name="$backend-$device/$model"
EXTRA_OPS="$extra_args $EXTRA_OPS"