run scripts for cpp, reference added
arjunsuresh committed Aug 5, 2023
1 parent 0c856c2 commit fce3db2
Showing 3 changed files with 255 additions and 0 deletions.
138 changes: 138 additions & 0 deletions cm-mlops/script/run-all-mlperf-models/run-cpp-implementation.sh
@@ -0,0 +1,138 @@
#!/bin/bash

#CM Script location: ${CM_TMP_CURRENT_SCRIPT_PATH}

#To export any variable
#echo "VARIABLE_NAME=VARIABLE_VALUE" >>tmp-run-env.out

#${CM_PYTHON_BIN_WITH_PATH} contains the path to the Python binary if "get,python" is added as a dependency
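# Minimal usage sketch (assumes "get,python" is listed as a dependency of this
# script so that CM_PYTHON_BIN_WITH_PATH gets set):
#   "${CM_PYTHON_BIN_WITH_PATH}" --version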



function exit_if_error() {
  # Preserve the failing command's exit code (not the status of `test` itself).
  local rc=$?
  if [[ ${rc} -ne 0 ]]; then exit ${rc}; fi
}

function run() {
echo "Running: "
echo "$1"
echo ""
if [[ ${CM_FAKE_RUN} != 'yes' ]]; then
eval "$1"
exit_if_error
fi
}
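
# Dry-run sketch (based on the CM_FAKE_RUN check above): setting CM_FAKE_RUN=yes
# makes run() print each command without executing it, e.g. when invoking this
# script directly:
#   CM_FAKE_RUN=yes bash ./run-cpp-implementation.sh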
division="closed"
#Add your run commands here...
# run "$CM_RUN_CMD"

POWER=" --power=yes --adr.mlperf-power-client.power_server=192.168.0.15 --adr.mlperf-power-client.port=4950 "
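# The power settings above are site-specific examples; adjust power_server and
# port for your own MLPerf power setup, or (assumption) set POWER="" to drop the
# power flags from the submission commands below.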

#cpp
run "cm run script --tags=generate-run-cmds,inference,_find-performance \
--model=resnet50 --implementation=cpp --device=cpu --backend=onnxruntime \
--adr.compiler.tags=gcc \
--category=edge --division=open --scenario=Offline --quiet --test_query_count=100"

run "cm run script --tags=generate-run-cmds,inference,_find-performance \
--model=retinanet --implementation=cpp --device=cpu --backend=onnxruntime \
--adr.compiler.tags=gcc \
--category=edge --division=open --scenario=Offline --quiet"


run "cm run script --tags=generate-run-cmds,inference,_submission \
--model=resnet50 --implementation=cpp --device=cpu --backend=onnxruntime \
--scenario=Offline \
--category=edge --division=$division --quiet \
--adr.compiler.tags=gcc \
--execution-mode=valid \
--skip_submission_generation=yes \
${POWER} \
--results_dir=$HOME/results_dir"

run "cm run script --tags=generate-run-cmds,inference,_submission \
--model=retinanet --implementation=cpp --device=cpu --backend=onnxruntime \
--scenario=Offline \
--category=edge --division=$division --quiet \
--adr.compiler.tags=gcc \
--execution-mode=valid \
--skip_submission_generation=yes \
${POWER} \
--results_dir=$HOME/results_dir"

run "cm run script --tags=generate-run-cmds,inference,_submission \
--model=resnet50 --implementation=cpp --device=cpu --backend=onnxruntime \
--scenario=SingleStream \
--category=edge --division=$division --quiet \
--adr.compiler.tags=gcc \
--execution-mode=valid \
--skip_submission_generation=yes \
${POWER} \
--results_dir=$HOME/results_dir"

run "cm run script --tags=generate-run-cmds,inference,_submission \
--model=retinanet --implementation=cpp --device=cpu --backend=onnxruntime \
--scenario=SingleStream \
--category=edge --division=$division --quiet \
--adr.compiler.tags=gcc \
--execution-mode=valid \
--skip_submission_generation=yes \
${POWER} \
--results_dir=$HOME/results_dir"

# GPU
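# The runs below repeat the CPU commands with --device=cuda; they assume a
# CUDA-capable GPU and a CUDA-enabled onnxruntime build are available.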

run "cm run script --tags=generate-run-cmds,inference,_find-performance \
--model=resnet50 --implementation=cpp --device=cuda --backend=onnxruntime \
--adr.compiler.tags=gcc \
--execution-mode=valid \
--category=edge --division=open --scenario=Offline --quiet --test_query_count=100"

run "cm run script --tags=generate-run-cmds,inference,_find-performance \
--model=retinanet --implementation=cpp --device=cuda --backend=onnxruntime \
--adr.compiler.tags=gcc \
--execution-mode=valid \
--category=edge --division=open --scenario=Offline --quiet"


run "cm run script --tags=generate-run-cmds,inference,_submission \
--scenario=Offline \
--model=resnet50 --implementation=cpp --device=cuda --backend=onnxruntime \
--category=edge --division=$division --quiet \
--adr.compiler.tags=gcc \
--execution-mode=valid \
--skip_submission_generation=yes \
${POWER} \
--results_dir=$HOME/results_dir"

run "cm run script --tags=generate-run-cmds,inference,_submission \
--model=retinanet --implementation=cpp --device=cuda --backend=onnxruntime \
--scenario=Offline \
--category=edge --division=$division --quiet \
--adr.compiler.tags=gcc \
--execution-mode=valid \
--skip_submission_generation=yes \
${POWER} \
--results_dir=$HOME/results_dir"


run "cm run script --tags=generate-run-cmds,inference,_submission \
--scenario=Offline \
--model=resnet50 --implementation=cpp --device=cuda --backend=onnxruntime \
--scenario=SingleStream \
--category=edge --division=$division --quiet \
--adr.compiler.tags=gcc \
--execution-mode=valid \
--skip_submission_generation=yes \
${POWER} \
--results_dir=$HOME/results_dir"

run "cm run script --tags=generate-run-cmds,inference,_submission \
--model=retinanet --implementation=cpp --device=cuda --backend=onnxruntime \
--scenario=SingleStream \
--category=edge --division=$division --quiet \
--adr.compiler.tags=gcc \
--execution-mode=valid \
--skip_submission_generation=yes \
${POWER} \
--results_dir=$HOME/results_dir"
67 changes: 67 additions & 0 deletions cm-mlops/script/run-all-mlperf-models/run-reference-models.sh
@@ -0,0 +1,67 @@
#!/bin/bash

#CM Script location: ${CM_TMP_CURRENT_SCRIPT_PATH}

#To export any variable
#echo "VARIABLE_NAME=VARIABLE_VALUE" >>tmp-run-env.out

#${CM_PYTHON_BIN_WITH_PATH} contains the path to the Python binary if "get,python" is added as a dependency



function exit_if_error() {
  # Preserve the failing command's exit code (not the status of `test` itself).
  local rc=$?
  if [[ ${rc} -ne 0 ]]; then exit ${rc}; fi
}

function run() {
echo "Running: "
echo "$1"
echo ""
if [[ ${CM_FAKE_RUN} != 'yes' ]]; then
eval "$1"
exit_if_error
fi
}
division="closed"
#Add your run commands here...
# run "$CM_RUN_CMD"
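# The _find-performance runs below use --division=open for quick estimation,
# while the _submission runs use the ${division} value set above; to target the
# open division for submissions as well, set (assumption):
# division="open"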
run "cm run script --tags=generate-run-cmds,inference,_find-performance \
--model=resnet50 --implementation=reference --device=cpu --backend=onnxruntime \
--category=edge --division=open --scenario=Offline --quiet --test_query_count=100"

run "cm run script --tags=generate-run-cmds,inference,_find-performance \
--model=rnnt --implementation=reference --device=cpu --backend=pytorch \
--category=edge --division=open --scenario=Offline --quiet"

run "cm run script --tags=generate-run-cmds,inference,_find-performance \
--model=retinanet --implementation=reference --device=cpu --backend=pytorch \
--category=edge --division=open --scenario=Offline --quiet"

run "cm run script --tags=generate-run-cmds,inference,_find-performance \
--model=bert-99 --implementation=reference --device=cpu --backend=pytorch \
--category=edge --division=open --scenario=Offline --quiet"

run "cm run script --tags=generate-run-cmds,inference,_find-performance \
--model=3d-unet-99 --implementation=reference --device=cpu --backend=pytorch \
--category=edge --division=open --scenario=Offline --quiet"
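
# Submission runs: the same five models across all scenarios, in the
# ${division} division configured above.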

run "cm run script --tags=generate-run-cmds,inference,_submission,_all-scenarios \
--model=resnet50 --implementation=reference --device=cpu --backend=onnxruntime \
--category=edge --division=$division --quiet"

run "cm run script --tags=generate-run-cmds,inference,_submission,_all-scenarios \
--model=rnnt --implementation=reference --device=cpu --backend=pytorch \
--category=edge --division=$division --quiet"

run "cm run script --tags=generate-run-cmds,inference,_submission,_all-scenarios \
--model=retinanet --implementation=reference --device=cpu --backend=pytorch \
--category=edge --division=$division --quiet"

run "cm run script --tags=generate-run-cmds,inference,_submission,_all-scenarios \
--model=bert-99 --implementation=reference --device=cpu --backend=pytorch \
--category=edge --division=$division --quiet"

run "cm run script --tags=generate-run-cmds,inference,_submission,_all-scenarios \
--model=3d-unet-99 --implementation=reference --device=cpu --backend=pytorch \
--category=edge --division=$division --quiet"

50 changes: 50 additions & 0 deletions cm-mlops/script/run-all-mlperf-models/run-resnet.sh
@@ -0,0 +1,50 @@
#!/bin/bash

#CM Script location: ${CM_TMP_CURRENT_SCRIPT_PATH}

#To export any variable
#echo "VARIABLE_NAME=VARIABLE_VALUE" >>tmp-run-env.out

#${CM_PYTHON_BIN_WITH_PATH} contains the path to the Python binary if "get,python" is added as a dependency



function exit_if_error() {
  # Preserve the failing command's exit code (not the status of `test` itself).
  local rc=$?
  if [[ ${rc} -ne 0 ]]; then exit ${rc}; fi
}

function run() {
echo "Running: "
echo "$1"
echo ""
if [[ ${CM_FAKE_RUN} != 'yes' ]]; then
eval "$1"
exit_if_error
fi
}
division="closed"
#Add your run commands here...
# run "$CM_RUN_CMD"
run "cm run script --tags=generate-run-cmds,inference,_find-performance \
--model=resnet50 --implementation=reference --device=cpu --backend=onnxruntime \
--category=edge --division=open --scenario=Offline --quiet --test_query_count=100"

run "cm run script --tags=generate-run-cmds,inference,_find-performance \
--model=resnet50 --implementation=reference --device=cpu --backend=tf \
--category=edge --division=open --scenario=Offline --quiet"

run "cm run script --tags=generate-run-cmds,inference,_find-performance \
--model=resnet50 --implementation=tflite-cpp --device=cpu --backend=tflite \
--category=edge --division=open --scenario=Offline --quiet"

run "cm run script --tags=generate-run-cmds,inference,_submission,_all-scenarios \
--model=resnet50 --implementation=reference --device=cpu --backend=onnxruntime \
--category=edge --division=$division --quiet"

run "cm run script --tags=generate-run-cmds,inference,_submission,_all-scenarios \
--model=resnet50 --implementation=reference --device=cpu --backend=tf \
--category=edge --division=$division --quiet"

run "cm run script --tags=generate-run-cmds,inference,_submission,_all-scenarios \
--model=resnet50 --implementation=tflite-cpp --device=cpu --backend=tflite \
--category=edge --division=$division --quiet"
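
# Optional GPU variant (untested sketch, mirroring the --device=cuda usage in
# run-cpp-implementation.sh); assumes a CUDA-capable setup. Remove the leading
# '#' characters to enable:
#run "cm run script --tags=generate-run-cmds,inference,_find-performance \
#--model=resnet50 --implementation=reference --device=cuda --backend=onnxruntime \
#--category=edge --division=open --scenario=Offline --quiet --test_query_count=100"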
