diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml new file mode 100644 index 0000000000..5df31b971e --- /dev/null +++ b/.github/workflows/publish.yaml @@ -0,0 +1,33 @@ +# This is a basic workflow to help you get started with Actions + +name: Publish site + + +on: + release: + types: [published] + push: + branches: + - main + - docs + +jobs: + + publish: + name: Publish the site + runs-on: ubuntu-latest + + steps: + - name: Checkout repository normally + uses: actions/checkout@v3 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.11" + + - name: Install Mkdocs + run: pip install -r docs/requirements.txt + + - name: Run Mkdocs deploy + run: mkdocs gh-deploy --force diff --git a/docs/AI-ML-datasets/get-croissant.md b/docs/AI-ML-datasets/get-croissant.md new file mode 100644 index 0000000000..3c62b3bc55 --- /dev/null +++ b/docs/AI-ML-datasets/get-croissant.md @@ -0,0 +1,126 @@ +Automatically generated README for this automation recipe: **get-croissant** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-croissant,8fd653eac8da4c14) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-croissant)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *get,mlcommons,croissant* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get mlcommons croissant" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,mlcommons,croissant` + +`cm run script --tags=get,mlcommons,croissant ` + +*or* + +`cmr "get mlcommons croissant"` + +`cmr "get mlcommons croissant " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,mlcommons,croissant', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,mlcommons,croissant"``` + +#### Run this script via Docker (beta) + +`cm docker script "get mlcommons croissant" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-croissant/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * git,repo,_repo.https://github.com/mlcommons/croissant + * CM names: `--adr.['git-mlcommons-croissant']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-croissant/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-croissant/_cm.yaml) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-croissant/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-croissant/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-croissant/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-croissant/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-croissant/_cm.yaml) + +___ +### Script output +`cmr "get mlcommons croissant " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-datasets/get-dataset-cifar10.md b/docs/AI-ML-datasets/get-dataset-cifar10.md new file mode 100644 index 0000000000..e6caa091c7 --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-cifar10.md @@ -0,0 +1,164 @@ +Automatically generated README for this automation recipe: **get-dataset-cifar10** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-cifar10,2f0c0bb3663b4ed7) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cifar10)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,cifar10,image-classification,validation,training* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset cifar10 image-classification validation training" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,cifar10,image-classification,validation,training` + +`cm run script --tags=get,dataset,cifar10,image-classification,validation,training[,variations] ` + +*or* + +`cmr "get dataset cifar10 image-classification validation training"` + +`cmr "get dataset cifar10 image-classification validation training [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,dataset,cifar10,image-classification,validation,training', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,cifar10,image-classification,validation,training"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset cifar10 image-classification validation training[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_tiny` + - Environment variables: + - *CM_DATASET_CONVERT_TO_TINYMLPERF*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,tinymlperf,src + - CM script: [get-mlperf-tiny-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-tiny-src) + * get,src,eembc,energy-runner + - CM script: [get-mlperf-tiny-eembc-energy-runner-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-tiny-eembc-energy-runner-src) + +
+ + + * Group "**data_format**" +
+ Click here to expand this section. + + * **`_python`** (default) + - Environment variables: + - *CM_DATASET*: `CIFAR10` + - *CM_DATASET_FILENAME*: `cifar-10-python.tar.gz` + - *CM_DATASET_FILENAME1*: `cifar-10-python.tar` + - *CM_DATASET_CIFAR10*: `https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz` + - Workflow: + +
+ + +#### Default variations + +`_python` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cifar10/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cifar10/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cifar10/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cifar10/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cifar10/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cifar10/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cifar10/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cifar10/_cm.json) + +___ +### Script output +`cmr "get dataset cifar10 image-classification validation training [,variations]" -j` +#### New environment keys (filter) + +* `CM_DATASET_*` +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-datasets/get-dataset-cnndm.md b/docs/AI-ML-datasets/get-dataset-cnndm.md new file mode 100644 index 0000000000..85be98b6aa --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-cnndm.md @@ -0,0 +1,175 @@ +Automatically generated README for this automation recipe: **get-dataset-cnndm** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-cnndm,aed298c156e24257) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cnndm)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,gpt-j,cnndm,cnn-dailymail,original* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset gpt-j cnndm cnn-dailymail original" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,gpt-j,cnndm,cnn-dailymail,original` + +`cm run script --tags=get,dataset,gpt-j,cnndm,cnn-dailymail,original[,variations] ` + +*or* + +`cmr "get dataset gpt-j cnndm cnn-dailymail original"` + +`cmr "get dataset gpt-j cnndm cnn-dailymail original [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,dataset,gpt-j,cnndm,cnn-dailymail,original', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,gpt-j,cnndm,cnn-dailymail,original"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset gpt-j cnndm cnn-dailymail original[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_intel` + - Workflow: + * `_intel,validation` + - Environment variables: + - *CM_CNNDM_INTEL_VARIATION*: `yes` + - Workflow: + +
+ + + * Group "**dataset-type**" +
+ Click here to expand this section. + + * `_calibration` + - Environment variables: + - *CM_DATASET_CALIBRATION*: `yes` + - Workflow: + * **`_validation`** (default) + - Environment variables: + - *CM_DATASET_CALIBRATION*: `no` + - Workflow: + +
+ + +#### Default variations + +`_validation` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_DATASET_CALIBRATION: `no` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cnndm/_cm.json)*** + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * mlperf,inference,source + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_CNNDM_INTEL_VARIATION': ['yes']}` + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,generic-python-lib,_package.simplejson + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_datasets + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.tokenizers + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_numpy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cnndm/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cnndm/_cm.json) + 1. ***Run native script if exists*** + * [run-intel.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cnndm/run-intel.sh) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cnndm/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cnndm/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cnndm/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cnndm/_cm.json) + +___ +### Script output +`cmr "get dataset gpt-j cnndm cnn-dailymail original [,variations]" -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-datasets/get-dataset-coco.md b/docs/AI-ML-datasets/get-dataset-coco.md new file mode 100644 index 0000000000..33aded32e1 --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-coco.md @@ -0,0 +1,215 @@ +Automatically generated README for this automation recipe: **get-dataset-coco** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-coco,c198e1f60ac6445c) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,object-detection,coco* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset object-detection coco" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,object-detection,coco` + +`cm run script --tags=get,dataset,object-detection,coco[,variations] [--input_flags]` + +*or* + +`cmr "get dataset object-detection coco"` + +`cmr "get dataset object-detection coco [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,dataset,object-detection,coco', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,object-detection,coco"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset object-detection coco[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * Group "**size**" +
+ Click here to expand this section. + + * **`_complete`** (default) + - Environment variables: + - *CM_DATASET_COCO_SIZE*: `complete` + - Workflow: + * `_small` + - Environment variables: + - *CM_DATASET_COCO_SIZE*: `small` + - Workflow: + +
+ + + * Group "**type**" +
+ Click here to expand this section. + + * `_train` + - Environment variables: + - *CM_DATASET_COCO_TYPE*: `train` + - Workflow: + * **`_val`** (default) + - Environment variables: + - *CM_DATASET_COCO_TYPE*: `val` + - Workflow: + +
+ + + * Group "**version**" +
+ Click here to expand this section. + + * **`_2017`** (default) + - Environment variables: + - *CM_DATASET_COCO_VERSION*: `2017` + - Workflow: + +
+ + +#### Default variations + +`_2017,_complete,_val` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--from=value` → `CM_FROM=value` +* `--home=value` → `CM_HOME_DIR=value` +* `--store=value` → `CM_STORE=value` +* `--to=value` → `CM_TO=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "from":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco/_cm.json)*** + * download-and-extract,file,_wget,_extract + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_DATASET_COCO_DETECTED': ['yes']}` + * CM names: `--adr.['get-dataset-coco-data', '746e5dad5e784ad6']...` + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + * download-and-extract,file,_wget,_extract + * Skip this dependency only if all ENV vars are set:
+`{'CM_DATASET_COCO_DETECTED': ['yes']}` + * CM names: `--adr.['get-dataset-coco-annotations', 'edb6cd092ff64171']...` + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco/_cm.json) + +___ +### Script output +`cmr "get dataset object-detection coco [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_DATASET_COCO*` +* `CM_DATASET_PATH` +* `CM_DATASET_PATH_ROOT` +#### New environment keys auto-detected from customize + +* `CM_DATASET_COCO_ANNOTATIONS_PATH` +* `CM_DATASET_COCO_DATA_PATH` +* `CM_DATASET_COCO_DETECTED` +* `CM_DATASET_COCO_MD5SUM_ANN` +* `CM_DATASET_COCO_MD5SUM_DATA` +* `CM_DATASET_COCO_PATH` +* `CM_DATASET_COCO_TYPE` +* `CM_DATASET_COCO_TYPE_AND_VERSION` +* `CM_DATASET_COCO_URL_ANNOTATIONS_FULL` +* `CM_DATASET_COCO_URL_DATA_FULL` +* `CM_DATASET_COCO_VERSION` +* `CM_DATASET_PATH` +* `CM_DATASET_PATH_ROOT` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-coco2014.md b/docs/AI-ML-datasets/get-dataset-coco2014.md new file mode 100644 index 0000000000..e13dc04feb --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-coco2014.md @@ -0,0 +1,204 @@ +Automatically generated README for this automation recipe: **get-dataset-coco2014** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-coco2014,3f7ad9d42f4040f8) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco2014)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,coco2014,object-detection,original* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset coco2014 object-detection original" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,coco2014,object-detection,original` + +`cm run script --tags=get,dataset,coco2014,object-detection,original[,variations] ` + +*or* + +`cmr "get dataset coco2014 object-detection original"` + +`cmr "get dataset coco2014 object-detection original [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,dataset,coco2014,object-detection,original', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,coco2014,object-detection,original"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset coco2014 object-detection original[variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**annotations**" +
+ Click here to expand this section. + + * `_custom-annotations` + - Environment variables: + - *CM_DATASET_COCO2014_CUSTOM_ANNOTATIONS*: `yes` + - Workflow: + * **`_default-annotations`** (default) + - Environment variables: + - *CM_DATASET_COCO2014_CUSTOM_ANNOTATIONS*: `no` + - Workflow: + +
+ + + * Group "**dataset-type**" +
+ Click here to expand this section. + + * `_calibration` + - Environment variables: + - *CM_DATASET_CALIBRATION*: `yes` + - Workflow: + * **`_validation`** (default) + - Environment variables: + - *CM_DATASET_CALIBRATION*: `no` + - Workflow: + +
+ + + * Group "**size**" +
+ Click here to expand this section. + + * **`_50`** (default) + - Environment variables: + - *CM_DATASET_SIZE*: `50` + - Workflow: + * `_500` + - Environment variables: + - *CM_DATASET_SIZE*: `500` + - Workflow: + * `_full` + - Environment variables: + - *CM_DATASET_SIZE*: `` + - Workflow: + * `_size.#` + - Environment variables: + - *CM_DATASET_SIZE*: `#` + - Workflow: + +
+ + +#### Default variations + +`_50,_default-annotations,_validation` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_DATASET_CALIBRATION: `no` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco2014/_cm.yaml)*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_package.tqdm + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * mlperf,inference,source + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco2014/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco2014/_cm.yaml) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco2014/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco2014/run.sh) + 1. ***Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco2014/_cm.yaml)*** + * get,coco2014,annotations + * Enable this dependency only if all ENV vars are set:
+`{'CM_DATASET_COCO2014_CUSTOM_ANNOTATIONS': ['yes']}` + - *Warning: no scripts found* + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco2014/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco2014/_cm.yaml) + +___ +### Script output +`cmr "get dataset coco2014 object-detection original [,variations]" -j` +#### New environment keys (filter) + +* `CM_CALIBRATION_DATASET_PATH` +* `CM_DATASET_ANNOTATIONS_DIR_PATH` +* `CM_DATASET_ANNOTATIONS_FILE_PATH` +* `CM_DATASET_PATH` +* `CM_DATASET_PATH_ROOT` +#### New environment keys auto-detected from customize + +* `CM_CALIBRATION_DATASET_PATH` +* `CM_DATASET_PATH` +* `CM_DATASET_PATH_ROOT` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-criteo.md b/docs/AI-ML-datasets/get-dataset-criteo.md new file mode 100644 index 0000000000..5f2b29d83e --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-criteo.md @@ -0,0 +1,154 @@ +Automatically generated README for this automation recipe: **get-dataset-criteo** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-criteo,194a47d908714897) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-criteo)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,criteo,original* +* Output 
cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset criteo original" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,criteo,original` + +`cm run script --tags=get,dataset,criteo,original[,variations] [--input_flags]` + +*or* + +`cmr "get dataset criteo original"` + +`cmr "get dataset criteo original [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,dataset,criteo,original', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,criteo,original"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset criteo original[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_backup` + - Environment variables: + - *CM_BACKUP_ZIPS*: `yes` + - Workflow: + * `_fake` + - Environment variables: + - *CM_CRITEO_FAKE*: `yes` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--criteo_path=value` → `CM_CRITEO_PATH=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "criteo_path":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_BACKUP_ZIPS: `no` + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-criteo/_cm.json) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-criteo/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-criteo/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-criteo/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-criteo/_cm.json) + +___ +### Script output +`cmr "get dataset criteo original [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_DATASET*` +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-datasets/get-dataset-imagenet-aux.md b/docs/AI-ML-datasets/get-dataset-imagenet-aux.md new file mode 100644 index 0000000000..e5d3a126c8 --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-imagenet-aux.md @@ -0,0 +1,155 @@ +Automatically generated README for this automation recipe: **get-dataset-imagenet-aux** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-imagenet-aux,bb2c6dd8c8c64217) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-aux)* +* CM meta 
description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,aux,dataset-aux,image-classification,imagenet-aux* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get aux dataset-aux image-classification imagenet-aux" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,aux,dataset-aux,image-classification,imagenet-aux` + +`cm run script --tags=get,aux,dataset-aux,image-classification,imagenet-aux[,variations] ` + +*or* + +`cmr "get aux dataset-aux image-classification imagenet-aux"` + +`cmr "get aux dataset-aux image-classification imagenet-aux [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,aux,dataset-aux,image-classification,imagenet-aux', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,aux,dataset-aux,image-classification,imagenet-aux"``` + +#### Run this script via Docker (beta) + +`cm docker script "get aux dataset-aux image-classification imagenet-aux[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_2012` + - Environment variables: + - *CM_DATASET_AUX_VER*: `2012` + - Workflow: + +
+ + + * Group "**download-source**" +
+ Click here to expand this section. + + * `_from.berkeleyvision` + - Environment variables: + - *CM_WGET_URL*: `http://dl.caffe.berkeleyvision.org/caffe_ilsvrc12.tar.gz` + - Workflow: + * **`_from.dropbox`** (default) + - Environment variables: + - *CM_WGET_URL*: `https://www.dropbox.com/s/92n2fyej3lzy3s3/caffe_ilsvrc12.tar.gz` + - Workflow: + +
+ + +#### Default variations + +`_from.dropbox` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-aux/_cm.json) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-aux/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-aux/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-aux/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-aux/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-aux/_cm.json) + +___ +### Script output +`cmr "get aux dataset-aux image-classification imagenet-aux [,variations]" -j` +#### New environment keys (filter) + +* `CM_DATASET_AUX_*` +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-datasets/get-dataset-imagenet-calibration.md b/docs/AI-ML-datasets/get-dataset-imagenet-calibration.md new file mode 100644 index 0000000000..76ae3ca526 --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-imagenet-calibration.md @@ -0,0 +1,146 @@ +Automatically generated README for this automation recipe: **get-dataset-imagenet-calibration** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-imagenet-calibration,30361fad3dff49ff) ]* + +--- +#### Summary + +* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-calibration)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,imagenet,calibration* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset imagenet calibration" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,imagenet,calibration` + +`cm run script --tags=get,dataset,imagenet,calibration[,variations] ` + +*or* + +`cmr "get dataset imagenet calibration"` + +`cmr "get dataset imagenet calibration [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,dataset,imagenet,calibration', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,imagenet,calibration"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset imagenet calibration[variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**calibration-option**" +
+ Click here to expand this section. + + * **`_mlperf.option1`** (default) + - Environment variables: + - *CM_MLPERF_IMAGENET_CALIBRATION_OPTION*: `one` + - *CM_DOWNLOAD_CHECKSUM*: `f09719174af3553119e2c621157773a6` + - Workflow: + * `_mlperf.option2` + - Environment variables: + - *CM_MLPERF_IMAGENET_CALIBRATION_OPTION*: `two` + - *CM_DOWNLOAD_CHECKSUM*: `e44582af00e3b4fc3fac30efd6bdd05f` + - Workflow: + +
+ + +#### Default variations + +`_mlperf.option1` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-calibration/_cm.yaml)*** + * download,file + * CM names: `--adr.['calibration-file-downloader']...` + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-calibration/_cm.yaml) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-calibration/_cm.yaml) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-calibration/_cm.yaml) + +___ +### Script output +`cmr "get dataset imagenet calibration [,variations]" -j` +#### New environment keys (filter) + +* `CM_MLPERF_IMAGENET_CALIBRATION_LIST_FILE_WITH_PATH` +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-datasets/get-dataset-imagenet-helper.md b/docs/AI-ML-datasets/get-dataset-imagenet-helper.md new file mode 100644 index 0000000000..6ce0dc22ea --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-imagenet-helper.md @@ -0,0 +1,120 @@ +Automatically generated README for this automation recipe: **get-dataset-imagenet-helper** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-imagenet-helper,a6c3c321d07742f9) ]* + +--- +#### Summary + +* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-helper)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,imagenet,helper,imagenet-helper* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get imagenet helper imagenet-helper" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,imagenet,helper,imagenet-helper` + +`cm run script --tags=get,imagenet,helper,imagenet-helper ` + +*or* + +`cmr "get imagenet helper imagenet-helper"` + +`cmr "get imagenet helper imagenet-helper " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,imagenet,helper,imagenet-helper', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,imagenet,helper,imagenet-helper"``` + +#### Run this script via Docker (beta) + +`cm docker script "get imagenet helper imagenet-helper" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-helper/_cm.json) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-helper/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-helper/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-helper/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-helper/_cm.json) + +___ +### Script output +`cmr "get imagenet helper imagenet-helper " -j` +#### New environment keys (filter) + +* `+PYTHONPATH` +* `CM_DATASET_IMAGENET_HELPER_PATH` +#### New environment keys auto-detected from customize + +* `CM_DATASET_IMAGENET_HELPER_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-imagenet-train.md b/docs/AI-ML-datasets/get-dataset-imagenet-train.md new file mode 100644 index 0000000000..a6c7feb9f0 --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-imagenet-train.md @@ -0,0 +1,149 @@ +Automatically generated README for this automation recipe: **get-dataset-imagenet-train** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-imagenet-train,2bec165da5cc4ebf) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* 
GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-train)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,imagenet,train,dataset,original* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get imagenet train dataset original" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,imagenet,train,dataset,original` + +`cm run script --tags=get,imagenet,train,dataset,original [--input_flags]` + +*or* + +`cmr "get imagenet train dataset original"` + +`cmr "get imagenet train dataset original " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,imagenet,train,dataset,original', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,imagenet,train,dataset,original"``` + +#### Run this script via Docker (beta) + +`cm docker script "get imagenet train dataset original" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--input=value` → `IMAGENET_TRAIN_PATH=value` +* `--torrent=value` → `CM_DATASET_IMAGENET_TRAIN_TORRENT_PATH=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "input":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-train/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-train/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-train/_cm.json)*** + * download-and-extract,file,_extract + * Enable this dependency only if all ENV vars are set:
+`{'CM_DATASET_IMAGENET_VAL_REQUIRE_DAE': ['yes', 'True']}` + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + * file,extract + * Enable this dependency only if all ENV vars are set:
+`{'CM_DAE_ONLY_EXTRACT': ['yes', 'True']}` + - CM script: [extract-file](https://github.com/mlcommons/cm4mlops/tree/master/script/extract-file) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-train/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-train/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-train/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-train/_cm.json) + +___ +### Script output +`cmr "get imagenet train dataset original " [--input_flags] -j` +#### New environment keys (filter) + +* `CM_DATASET_IMAGENET_*` +* `CM_DATASET_PATH` +#### New environment keys auto-detected from customize + +* `CM_DATASET_IMAGENET_PATH` +* `CM_DATASET_IMAGENET_TRAIN_PATH` +* `CM_DATASET_IMAGENET_TRAIN_REQUIRE_DAE` +* `CM_DATASET_IMAGENET_VAL_REQUIRE_DAE` +* `CM_DATASET_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-imagenet-val.md b/docs/AI-ML-datasets/get-dataset-imagenet-val.md new file mode 100644 index 0000000000..09c78b485d --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-imagenet-val.md @@ -0,0 +1,211 @@ +Automatically generated README for this automation recipe: **get-dataset-imagenet-val** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-imagenet-val,7afd58d287fe4f11) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-val)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,val,validation,dataset,imagenet,ILSVRC,image-classification,original* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get val validation dataset imagenet ILSVRC image-classification original" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,val,validation,dataset,imagenet,ILSVRC,image-classification,original` + +`cm run script --tags=get,val,validation,dataset,imagenet,ILSVRC,image-classification,original[,variations] [--input_flags]` + +*or* + +`cmr "get val validation dataset imagenet ILSVRC image-classification original"` + +`cmr "get val validation dataset imagenet ILSVRC image-classification original [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,val,validation,dataset,imagenet,ILSVRC,image-classification,original', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,val,validation,dataset,imagenet,ILSVRC,image-classification,original"``` + +#### Run this script via Docker (beta) + +`cm docker script "get val validation dataset imagenet ILSVRC image-classification original[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_2012-500` + - Workflow: + * `_2012-full` + - Workflow: + * `_run-during-docker-build` + - Workflow: + +
+ + + * Group "**count**" +
+ Click here to expand this section. + + * `_full` + - Environment variables: + - *CM_DATASET_SIZE*: `50000` + - *CM_IMAGENET_FULL*: `yes` + - *CM_DAE_FILENAME*: `ILSVRC2012_img_val.tar` + - *CM_DAE_DOWNLOADED_CHECKSUM*: `29b22e2961454d5413ddabcf34fc5622` + - Workflow: + * `_size.#` + - Environment variables: + - *CM_DATASET_SIZE*: `#` + - Workflow: + * **`_size.500`** (default) + - Environment variables: + - *CM_DATASET_SIZE*: `500` + - *CM_DAE_FILENAME*: `ILSVRC2012_img_val_500.tar` + - *CM_DAE_URL*: `http://cKnowledge.org/ai/data/ILSVRC2012_img_val_500.tar` + - Workflow: + +
+ + + * Group "**dataset-version**" +
+ Click here to expand this section. + + * **`_2012`** (default) + - Environment variables: + - *CM_DATASET_VER*: `2012` + - Workflow: + +
+ + +#### Default variations + +`_2012,_size.500` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--imagenet_path=value` → `IMAGENET_PATH=value` +* `--torrent=value` → `CM_DATASET_IMAGENET_VAL_TORRENT_PATH=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "imagenet_path":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-val/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-val/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-val/_cm.json)*** + * download-and-extract,file,_extract + * Enable this dependency only if all ENV vars are set:
+`{'CM_DATASET_IMAGENET_VAL_REQUIRE_DAE': ['yes', 'True']}` + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + * file,extract,_no-remove-extracted + * Enable this dependency only if all ENV vars are set:
+`{'CM_DAE_ONLY_EXTRACT': ['yes', 'True']}` + - CM script: [extract-file](https://github.com/mlcommons/cm4mlops/tree/master/script/extract-file) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-val/run.bat) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-val/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-val/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-val/_cm.json) + +___ +### Script output +`cmr "get val validation dataset imagenet ILSVRC image-classification original [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_DATASET_IMAGENET_PATH` +* `CM_DATASET_IMAGENET_VAL_PATH` +* `CM_DATASET_PATH` +* `CM_DATASET_SIZE` +* `CM_DATASET_VER` +#### New environment keys auto-detected from customize + +* `CM_DATASET_IMAGENET_PATH` +* `CM_DATASET_IMAGENET_VAL_PATH` +* `CM_DATASET_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-kits19.md b/docs/AI-ML-datasets/get-dataset-kits19.md new file mode 100644 index 0000000000..53f222b565 --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-kits19.md @@ -0,0 +1,172 @@ +Automatically generated README for this automation recipe: **get-dataset-kits19** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-kits19,79992bb221024ac5) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* 
GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-kits19)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,medical-imaging,kits,original,kits19* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset medical-imaging kits original kits19" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,medical-imaging,kits,original,kits19` + +`cm run script --tags=get,dataset,medical-imaging,kits,original,kits19[,variations] ` + +*or* + +`cmr "get dataset medical-imaging kits original kits19"` + +`cmr "get dataset medical-imaging kits original kits19 [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,dataset,medical-imaging,kits,original,kits19', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,medical-imaging,kits,original,kits19"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset medical-imaging kits original kits19[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_calibration` + - Environment variables: + - *CM_DATASET_CALIBRATION*: `yes` + - Workflow: + * `_default` + - Environment variables: + - *CM_GIT_PATCH*: `no` + - Workflow: + * `_full-history` + - Environment variables: + - *CM_GIT_DEPTH*: `` + - Workflow: + * `_no-recurse-submodules` + - Environment variables: + - *CM_GIT_RECURSE_SUBMODULES*: `` + - Workflow: + * `_patch` + - Environment variables: + - *CM_GIT_PATCH*: `yes` + - Workflow: + * `_short-history` + - Environment variables: + - *CM_GIT_DEPTH*: `--depth 5` + - Workflow: + * `_validation` + - Environment variables: + - *CM_DATASET_VALIDATION*: `yes` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_GIT_CHECKOUT: `master` +* CM_GIT_DEPTH: `--depth 2` +* CM_GIT_PATCH: `no` +* CM_GIT_RECURSE_SUBMODULES: `` +* CM_GIT_URL: `https://github.com/neheller/kits19` + +
+ +#### Versions +Default version: `master` + +* `custom` +* `master` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-kits19/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-kits19/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-kits19/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-kits19/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-kits19/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-kits19/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-kits19/_cm.json) + +___ +### Script output +`cmr "get dataset medical-imaging kits original kits19 [,variations]" -j` +#### New environment keys (filter) + +* `CM_DATASET_*` +#### New environment keys auto-detected from customize + +* `CM_DATASET_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-librispeech.md b/docs/AI-ML-datasets/get-dataset-librispeech.md new file mode 100644 index 0000000000..170522f4c7 --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-librispeech.md @@ -0,0 +1,134 @@ +Automatically generated README for this automation recipe: **get-dataset-librispeech** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-librispeech,09f29df607e0415d) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-librispeech)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,speech,speech-recognition,librispeech,validation,audio,training,original* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset speech speech-recognition librispeech validation audio training original" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,speech,speech-recognition,librispeech,validation,audio,training,original` + +`cm run script --tags=get,dataset,speech,speech-recognition,librispeech,validation,audio,training,original ` + +*or* + +`cmr "get dataset speech speech-recognition librispeech validation audio training original"` + +`cmr "get dataset speech speech-recognition librispeech validation audio training original " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'get,dataset,speech,speech-recognition,librispeech,validation,audio,training,original' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,speech,speech-recognition,librispeech,validation,audio,training,original"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset speech speech-recognition librispeech validation audio training original" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `dev-clean` + +* `dev-clean` +* `dev-other` +* `test-clean` +* `test-other` +* `train-clean-100` +* `train-clean-360` +* `train-other-500` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-librispeech/_cm.json)*** + * get,sys-utils-cm + * CM names: `--adr.['sys-utils']...` + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-librispeech/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-librispeech/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-librispeech/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-librispeech/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-librispeech/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-librispeech/_cm.json) + +___ +### Script output +`cmr "get dataset speech speech-recognition librispeech validation audio training original " -j` +#### New environment keys (filter) + +* `CM_DATASET_*` +#### New environment keys auto-detected from customize + +* `CM_DATASET_LIBRISPEECH_PATH` +* `CM_DATASET_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-openimages-annotations.md b/docs/AI-ML-datasets/get-dataset-openimages-annotations.md new file mode 100644 index 0000000000..c7b470c4db --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-openimages-annotations.md @@ -0,0 +1,144 @@ +Automatically generated README for this automation recipe: **get-dataset-openimages-annotations** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-openimages-annotations,47e2158ed24c44e9) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-annotations)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,aux,dataset-aux,object-detection,openimages,annotations* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get aux dataset-aux object-detection openimages annotations" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,aux,dataset-aux,object-detection,openimages,annotations` + +`cm run script --tags=get,aux,dataset-aux,object-detection,openimages,annotations[,variations] ` + +*or* + +`cmr "get aux dataset-aux object-detection openimages annotations"` + +`cmr "get aux dataset-aux object-detection openimages annotations [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'get,aux,dataset-aux,object-detection,openimages,annotations' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,aux,dataset-aux,object-detection,openimages,annotations"``` + +#### Run this script via Docker (beta) + +`cm docker script "get aux dataset-aux object-detection openimages annotations[variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**download-source**" +
+ Click here to expand this section. + + * **`_from.github`** (default) + - Environment variables: + - *CM_WGET_URL*: `https://github.com/mlcommons/inference/releases/download/v2.1/openimages-mlperf_annotations_2.1.json.zip` + - Workflow: + +
+ + +#### Default variations + +`_from.github` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-annotations/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-annotations/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-annotations/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-annotations/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-annotations/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-annotations/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-annotations/_cm.json) + +___ +### Script output +`cmr "get aux dataset-aux object-detection openimages annotations [,variations]" -j` +#### New environment keys (filter) + +* `CM_DATASET_ANNOTATIONS_*` +* `CM_DATASET_OPENIMAGES_ANNOTATIONS_*` +#### New environment keys auto-detected from customize + +* `CM_DATASET_ANNOTATIONS_DIR_PATH` +* `CM_DATASET_ANNOTATIONS_FILE_PATH` +* `CM_DATASET_OPENIMAGES_ANNOTATIONS_DIR_PATH` +* `CM_DATASET_OPENIMAGES_ANNOTATIONS_FILE_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-openimages-calibration.md b/docs/AI-ML-datasets/get-dataset-openimages-calibration.md new file mode 100644 index 0000000000..969e9872dc --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-openimages-calibration.md @@ -0,0 +1,178 @@ +Automatically generated README for this automation recipe: **get-dataset-openimages-calibration** + +Category: 
**AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-openimages-calibration,27228976bb084dd0) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-calibration)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,openimages,calibration* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset openimages calibration" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,openimages,calibration` + +`cm run script --tags=get,dataset,openimages,calibration[,variations] ` + +*or* + +`cmr "get dataset openimages calibration"` + +`cmr "get dataset openimages calibration [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'get,dataset,openimages,calibration' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,openimages,calibration"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset openimages calibration[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_filter` + - Environment variables: + - *CM_CALIBRATE_FILTER*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,openimages,dataset,original,_calibration + - CM script: [get-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages) + +
+ + + * Group "**calibration-option**" +
+ Click here to expand this section. + + * **`_mlperf.option1`** (default) + - Environment variables: + - *CM_MLPERF_OPENIMAGES_CALIBRATION_OPTION*: `one` + - *CM_DOWNLOAD_CHECKSUM1*: `f09719174af3553119e2c621157773a6` + - Workflow: + +
+ + + * Group "**filter-size**" +
+ Click here to expand this section. + + * `_filter-size.#` + - Environment variables: + - *CM_CALIBRATION_FILTER_SIZE*: `#` + - Workflow: + * `_filter-size.400` + - Environment variables: + - *CM_CALIBRATION_FILTER_SIZE*: `400` + - Workflow: + +
+ + +#### Default variations + +`_mlperf.option1` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-calibration/_cm.yaml)*** + * download,file + * CM names: `--adr.['calibration-file-downloader']...` + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-calibration/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-calibration/_cm.yaml) + 1. ***Run native script if exists*** + * [run-filter.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-calibration/run-filter.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-calibration/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-calibration/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-calibration/_cm.yaml) + +___ +### Script output +`cmr "get dataset openimages calibration [,variations]" -j` +#### New environment keys (filter) + +* `CM_MLPERF_OPENIMAGES_CALIBRATION_LIST_FILE_WITH_PATH` +#### New environment keys auto-detected from customize + +* `CM_MLPERF_OPENIMAGES_CALIBRATION_LIST_FILE_WITH_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-openimages.md b/docs/AI-ML-datasets/get-dataset-openimages.md new file mode 100644 index 0000000000..a5d30a4b0a --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-openimages.md @@ -0,0 +1,250 @@ +Automatically generated README for this automation recipe: **get-dataset-openimages** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-openimages,0a9d49b644cf4142) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,openimages,open-images,object-detection,original* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset openimages open-images object-detection original" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,openimages,open-images,object-detection,original` + +`cm run script --tags=get,dataset,openimages,open-images,object-detection,original[,variations] ` + +*or* + +`cmr "get dataset openimages open-images object-detection original"` + +`cmr "get dataset openimages open-images object-detection original [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'get,dataset,openimages,open-images,object-detection,original' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,openimages,open-images,object-detection,original"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset openimages open-images object-detection original[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_filter` + - Workflow: + * `_filter,calibration` + - Workflow: + * `_filter-size.#` + - Workflow: + * `_using-fiftyone` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_fiftyone + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,openssl,lib + - CM script: [get-openssl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-openssl) + +
+ + + * Group "**annotations**" +
+ Click here to expand this section. + + * `_custom-annotations` + - Environment variables: + - *CM_DATASET_OPENIMAGES_CUSTOM_ANNOTATIONS*: `yes` + - Workflow: + * **`_default-annotations`** (default) + - Environment variables: + - *CM_DATASET_OPENIMAGES_CUSTOM_ANNOTATIONS*: `no` + - Workflow: + +
+ + + * Group "**dataset-type**" +
+ Click here to expand this section. + + * `_calibration` + - Environment variables: + - *CM_DATASET_CALIBRATION*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,openimages,calibration + * CM names: `--adr.['openimages-calibration']...` + - CM script: [get-dataset-openimages-calibration](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages-calibration) + * **`_validation`** (default) + - Environment variables: + - *CM_DATASET_CALIBRATION*: `no` + - Workflow: + +
+ + + * Group "**size**" +
+ Click here to expand this section. + + * **`_50`** (default) + - Environment variables: + - *CM_DATASET_SIZE*: `50` + - Workflow: + * `_500` + - Environment variables: + - *CM_DATASET_SIZE*: `500` + - Workflow: + * `_full` + - Environment variables: + - *CM_DATASET_SIZE*: `` + - Workflow: + * `_size.#` + - Environment variables: + - *CM_DATASET_SIZE*: `#` + - Workflow: + +
+ + +#### Default variations + +`_50,_default-annotations,_validation` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_DATASET_CALIBRATION: `no` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages/_cm.json)*** + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_requests + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * mlperf,inference,source + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,generic-python-lib,_boto3 + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_tqdm + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_numpy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_opencv-python + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_pandas + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_pycocotools + * CM names: `--adr.['pycocotools']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages/customize.py)*** + 1. 
Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages/run.sh) + 1. ***Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages/_cm.json)*** + * get,openimages,annotations + * Enable this dependency only if all ENV vars are set:
+`{'CM_DATASET_OPENIMAGES_CUSTOM_ANNOTATIONS': ['yes']}` + - CM script: [get-dataset-openimages-annotations](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages-annotations) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages/_cm.json) + +___ +### Script output +`cmr "get dataset openimages open-images object-detection original [,variations]" -j` +#### New environment keys (filter) + +* `CM_CALIBRATION_DATASET_PATH` +* `CM_DATASET_ANNOTATIONS_DIR_PATH` +* `CM_DATASET_ANNOTATIONS_FILE_PATH` +* `CM_DATASET_CALIBRATION_ANNOTATIONS_FILE_PATH` +* `CM_DATASET_PATH` +* `CM_DATASET_PATH_ROOT` +* `CM_DATASET_VALIDATION_ANNOTATIONS_FILE_PATH` +#### New environment keys auto-detected from customize + +* `CM_CALIBRATION_DATASET_PATH` +* `CM_DATASET_ANNOTATIONS_DIR_PATH` +* `CM_DATASET_ANNOTATIONS_FILE_PATH` +* `CM_DATASET_CALIBRATION_ANNOTATIONS_FILE_PATH` +* `CM_DATASET_PATH` +* `CM_DATASET_PATH_ROOT` +* `CM_DATASET_VALIDATION_ANNOTATIONS_FILE_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-openorca.md b/docs/AI-ML-datasets/get-dataset-openorca.md new file mode 100644 index 0000000000..982a9c9c62 --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-openorca.md @@ -0,0 +1,173 @@ +Automatically generated README for this automation recipe: **get-dataset-openorca** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-openorca,9252c4d90d5940b7) ]* + +--- +#### Summary + +* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openorca)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,openorca,language-processing,original* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset openorca language-processing original" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,openorca,language-processing,original` + +`cm run script --tags=get,dataset,openorca,language-processing,original[,variations] ` + +*or* + +`cmr "get dataset openorca language-processing original"` + +`cmr "get dataset openorca language-processing original [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'get,dataset,openorca,language-processing,original' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,openorca,language-processing,original"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset openorca language-processing original[variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**dataset-type**" +
+ Click here to expand this section. + + * `_calibration` + - Environment variables: + - *CM_DATASET_CALIBRATION*: `yes` + - Workflow: + * **`_validation`** (default) + - Environment variables: + - *CM_DATASET_CALIBRATION*: `no` + - Workflow: + +
+ + + * Group "**size**" +
+ Click here to expand this section. + + * `_500` + - Environment variables: + - *CM_DATASET_SIZE*: `500` + - Workflow: + * **`_60`** (default) + - Environment variables: + - *CM_DATASET_SIZE*: `60` + - Workflow: + * `_full` + - Environment variables: + - *CM_DATASET_SIZE*: `24576` + - Workflow: + * `_size.#` + - Environment variables: + - *CM_DATASET_SIZE*: `#` + - Workflow: + +
+ + +#### Default variations + +`_60,_validation` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_DATASET_CALIBRATION: `no` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openorca/_cm.json)*** + * get,git,repo,_lfs,_repo.https://huggingface.co/datasets/Open-Orca/OpenOrca + * CM names: `--adr.['openorca-src']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openorca/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openorca/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openorca/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openorca/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openorca/_cm.json) + +___ +### Script output +`cmr "get dataset openorca language-processing original [,variations]" -j` +#### New environment keys (filter) + +* `CM_DATASET_*` +#### New environment keys auto-detected from customize + +* `CM_DATASET_OPENORCA_PARQUET` +* `CM_DATASET_PATH` +* `CM_DATASET_PATH_ROOT` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-squad-vocab.md b/docs/AI-ML-datasets/get-dataset-squad-vocab.md new file mode 100644 index 0000000000..1152f22929 --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-squad-vocab.md @@ -0,0 +1,142 @@ +Automatically generated README for this automation recipe: **get-dataset-squad-vocab** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-squad-vocab,e38874fff5094577) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad-vocab)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,aux,dataset-aux,language-processing,squad-aux,vocab,squad-vocab* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get aux dataset-aux language-processing squad-aux vocab squad-vocab" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,aux,dataset-aux,language-processing,squad-aux,vocab,squad-vocab` + +`cm run script --tags=get,aux,dataset-aux,language-processing,squad-aux,vocab,squad-vocab[,variations] ` + +*or* + +`cmr "get aux dataset-aux language-processing squad-aux vocab squad-vocab"` + +`cmr "get aux dataset-aux language-processing squad-aux vocab squad-vocab [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+ 'automation':'script',
+ 'tags':'get,aux,dataset-aux,language-processing,squad-aux,vocab,squad-vocab',
+ 'out':'con',
+ ...
+ (other input keys for this script)
+ ...
+ })
+
+if r['return']>0:
+ print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,aux,dataset-aux,language-processing,squad-aux,vocab,squad-vocab"``` + +#### Run this script via Docker (beta) + +`cm docker script "get aux dataset-aux language-processing squad-aux vocab squad-vocab[variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**download-source**" +
+ Click here to expand this section. + + * **`_from.zenodo`** (default) + - Environment variables: + - *CM_WGET_URL*: `https://zenodo.org/record/3733868/files/vocab.txt` + - Workflow: + +
+ + +#### Default variations + +`_from.zenodo` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad-vocab/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad-vocab/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad-vocab/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad-vocab/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad-vocab/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad-vocab/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad-vocab/_cm.json) + +___ +### Script output +`cmr "get aux dataset-aux language-processing squad-aux vocab squad-vocab [,variations]" -j` +#### New environment keys (filter) + +* `CM_DATASET_SQUAD_VOCAB_PATH` +* `CM_ML_MODEL_BERT_VOCAB_FILE_WITH_PATH` +#### New environment keys auto-detected from customize + +* `CM_DATASET_SQUAD_VOCAB_PATH` +* `CM_ML_MODEL_BERT_VOCAB_FILE_WITH_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-squad.md b/docs/AI-ML-datasets/get-dataset-squad.md new file mode 100644 index 0000000000..a7f1a55959 --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-squad.md @@ -0,0 +1,129 @@ +Automatically generated README for this automation recipe: **get-dataset-squad** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run 
this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-squad,6651c119c3ae49b3) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,squad,language-processing,validation,original* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset squad language-processing validation original" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,squad,language-processing,validation,original` + +`cm run script --tags=get,dataset,squad,language-processing,validation,original ` + +*or* + +`cmr "get dataset squad language-processing validation original"` + +`cmr "get dataset squad language-processing validation original " ` + + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+ 'automation':'script',
+ 'tags':'get,dataset,squad,language-processing,validation,original',
+ 'out':'con',
+ ...
+ (other input keys for this script)
+ ...
+ })
+
+if r['return']>0:
+ print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,squad,language-processing,validation,original"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset squad language-processing validation original" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+
+#### Versions
+Default version: `1.1`
+
+* `1.1`
+* `2.0`
+___
+### Dependencies on other CM scripts
+
+
+ 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad/_cm.json)***
+ * get,sys-utils-cm
+ - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm)
+ 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad/customize.py)***
+ 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad/_cm.json)
+ 1. ***Run native script if exists***
+ * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad/run.sh)
+ 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad/_cm.json)
+ 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad/customize.py)***
+ 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad/_cm.json) + +___ +### Script output +`cmr "get dataset squad language-processing validation original " -j` +#### New environment keys (filter) + +* `CM_DATASET_*` +#### New environment keys auto-detected from customize + +* `CM_DATASET_PATH` +* `CM_DATASET_SQUAD_PATH` +* `CM_DATASET_SQUAD_VAL_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-preprocessed-dataset-criteo.md b/docs/AI-ML-datasets/get-preprocessed-dataset-criteo.md new file mode 100644 index 0000000000..fec163969d --- /dev/null +++ b/docs/AI-ML-datasets/get-preprocessed-dataset-criteo.md @@ -0,0 +1,226 @@ +Automatically generated README for this automation recipe: **get-preprocessed-dataset-criteo** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-preprocessed-dataset-criteo,afa59956272a4ba4) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-criteo)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,criteo,recommendation,dlrm,preprocessed* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset criteo recommendation dlrm preprocessed" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,criteo,recommendation,dlrm,preprocessed` + +`cm run script --tags=get,dataset,criteo,recommendation,dlrm,preprocessed[,variations] [--input_flags]` + +*or* + +`cmr "get dataset criteo recommendation dlrm preprocessed"` + +`cmr "get dataset criteo recommendation dlrm preprocessed [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+ 'automation':'script',
+ 'tags':'get,dataset,criteo,recommendation,dlrm,preprocessed',
+ 'out':'con',
+ ...
+ (other input keys for this script)
+ ...
+ })
+
+if r['return']>0:
+ print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,criteo,recommendation,dlrm,preprocessed"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset criteo recommendation dlrm preprocessed[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_1` + - Environment variables: + - *CM_DATASET_SIZE*: `1` + - Workflow: + * `_50` + - Environment variables: + - *CM_DATASET_SIZE*: `50` + - Workflow: + * `_fake` + - Environment variables: + - *CM_CRITEO_FAKE*: `yes` + - Workflow: + * `_full` + - Workflow: + * `_validation` + - Workflow: + +
+ + + * Group "**type**" +
+ Click here to expand this section. + + * **`_multihot`** (default) + - Environment variables: + - *CM_DATASET_CRITEO_MULTIHOT*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,mlperf,training,src + * CM names: `--adr.['mlperf-training', 'training-src']...` + - CM script: [get-mlperf-training-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-training-src) + * get,generic-python-lib,_package.typing_inspect + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.iopath + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.fbgemm_gpu + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.torchrec + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.pyre_extensions + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + +
+ + +#### Default variations + +`_multihot` + +#### Script flags mapped to environment +
+Click here to expand this section.
+
+* `--dir=value` → `CM_DATASET_PREPROCESSED_PATH=value`
+* `--output_dir=value` → `CM_DATASET_PREPROCESSED_OUTPUT_PATH=value`
+* `--threads=value` → `CM_NUM_PREPROCESS_THREADS=value`
+
+**Above CLI flags can be used in the Python CM API as follows:**
+
+```python
+r=cm.access({... , "dir":...})
+```
+
+</details>
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-criteo/_cm.json)*** + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,dataset,criteo,original + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_DATASET_PREPROCESSED_PATH': ['on']}` + * CM names: `--adr.['original-dataset', 'criteo-dataset']...` + - CM script: [get-dataset-criteo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-criteo) + * get,dlrm,src + * CM names: `--adr.['dlrm-src']...` + - CM script: [get-dlrm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dlrm) + * mlperf,mlcommons,inference,source,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,generic-python-lib,_scikit-learn + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torch + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_opencv-python + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_decorator + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_psutil + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_onnx + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_tqdm + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_mlperf_logging + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. 
***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-criteo/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-criteo/_cm.json) + 1. ***Run native script if exists*** + * [run-multihot.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-criteo/run-multihot.sh) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-criteo/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-criteo/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-criteo/_cm.json) + +___ +### Script output +`cmr "get dataset criteo recommendation dlrm preprocessed [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_DATASET_*` +#### New environment keys auto-detected from customize + +* `CM_DATASET_PREPROCESSED_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-preprocessed-dataset-generic.md b/docs/AI-ML-datasets/get-preprocessed-dataset-generic.md new file mode 100644 index 0000000000..f6ecaad040 --- /dev/null +++ b/docs/AI-ML-datasets/get-preprocessed-dataset-generic.md @@ -0,0 +1,117 @@ +Automatically generated README for this automation recipe: **get-preprocesser-script-generic** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-preprocesser-script-generic,d5e603627e2046eb) ]* + +--- +#### Summary + +* CM GitHub 
repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocesser-script-generic)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,preprocessor,generic,image-preprocessor,script* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get preprocessor generic image-preprocessor script" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,preprocessor,generic,image-preprocessor,script` + +`cm run script --tags=get,preprocessor,generic,image-preprocessor,script ` + +*or* + +`cmr "get preprocessor generic image-preprocessor script"` + +`cmr "get preprocessor generic image-preprocessor script " ` + + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+ 'automation':'script',
+ 'tags':'get,preprocessor,generic,image-preprocessor,script',
+ 'out':'con',
+ ...
+ (other input keys for this script)
+ ...
+ })
+
+if r['return']>0:
+ print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,preprocessor,generic,image-preprocessor,script"``` + +#### Run this script via Docker (beta) + +`cm docker script "get preprocessor generic image-preprocessor script" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocesser-script-generic/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocesser-script-generic/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocesser-script-generic/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocesser-script-generic/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocesser-script-generic/_cm.json) + +___ +### Script output +`cmr "get preprocessor generic image-preprocessor script " -j` +#### New environment keys (filter) + +* `+PYTHONPATH` +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-datasets/get-preprocessed-dataset-imagenet.md b/docs/AI-ML-datasets/get-preprocessed-dataset-imagenet.md new file mode 100644 index 0000000000..6c557299aa --- /dev/null +++ b/docs/AI-ML-datasets/get-preprocessed-dataset-imagenet.md @@ -0,0 +1,456 @@ +Automatically generated README for this automation recipe: **get-preprocessed-dataset-imagenet** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-preprocessed-dataset-imagenet,f259d490bbaf45f5) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-imagenet)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,imagenet,ILSVRC,image-classification,preprocessed* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset imagenet ILSVRC image-classification preprocessed" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,imagenet,ILSVRC,image-classification,preprocessed` + +`cm run script --tags=get,dataset,imagenet,ILSVRC,image-classification,preprocessed[,variations] [--input_flags]` + +*or* + +`cmr "get dataset imagenet ILSVRC image-classification preprocessed"` + +`cmr "get dataset imagenet ILSVRC image-classification preprocessed [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+ 'automation':'script',
+ 'tags':'get,dataset,imagenet,ILSVRC,image-classification,preprocessed',
+ 'out':'con',
+ ...
+ (other input keys for this script)
+ ...
+ })
+
+if r['return']>0:
+ print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,imagenet,ILSVRC,image-classification,preprocessed"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset imagenet ILSVRC image-classification preprocessed[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *Internal group (variations should not be selected manually)* +
+ Click here to expand this section. + + * `_mobilenet_` + - Environment variables: + - *CM_MODEL*: `mobilenet` + - Workflow: + * `_resnet50_` + - Environment variables: + - *CM_MODEL*: `resnet50` + - Workflow: + +
+ + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_500,validation` + - Workflow: + * `_default` + - Workflow: + * `_for.mobilenet,float32` + - Environment variables: + - *CM_DATASET_QUANTIZE*: `0` + - *CM_DATASET_GIVEN_CHANNEL_MEANS*: `` + - *CM_DATASET_NORMALIZE_DATA*: `1` + - *CM_DATASET_SUBTRACT_MEANS*: `0` + - Workflow: + * `_for.mobilenet,rgb8` + - Environment variables: + - *CM_DATASET_GIVEN_CHANNEL_MEANS*: `` + - *CM_DATASET_SUBTRACT_MEANS*: `0` + - *CM_DATASET_QUANTIZE*: `0` + - *CM_DATASET_NORMALIZE_DATA*: `0` + - *CM_DATASET_DATA_TYPE*: `uint8` + - Workflow: + * `_for.resnet50,float32` + - Workflow: + * `_for.resnet50,rgb8` + - Environment variables: + - *CM_DATASET_GIVEN_CHANNEL_MEANS*: `` + - *CM_DATASET_SUBTRACT_MEANS*: `0` + - *CM_DATASET_NORMALIZE_DATA*: `0` + - *CM_DATASET_QUANTIZE*: `0` + - *CM_DATASET_DATA_TYPE*: `uint8` + - Workflow: + * `_for.resnet50,rgb8,uint8` + - Environment variables: + - *CM_DATASET_GIVEN_CHANNEL_MEANS*: `123.68 116.78 103.94` + - *CM_DATASET_SUBTRACT_MEANS*: `1` + - *CM_DATASET_QUANTIZE*: `1` + - Workflow: + * `_for.resnet50,uint8` + - Environment variables: + - *CM_DATASET_QUANT_SCALE*: `1.18944883` + - *CM_DATASET_QUANT_OFFSET*: `0` + - Workflow: + * `_pytorch` + - Environment variables: + - *CM_PREPROCESS_PYTORCH*: `yes` + - *CM_MODEL*: `resnet50` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_torchvision + * CM names: `--adr.['torchvision']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_tflite_tpu` + - Environment variables: + - *CM_MODEL*: `resnet50` + - *CM_PREPROCESS_TFLITE_TPU*: `yes` + - Workflow: + +
+ + + * Group "**calibration-option**" +
+ Click here to expand this section. + + * `_mlperf.option1` + - Environment variables: + - *CM_DATASET_CALIBRATION_OPTION*: `one` + - Workflow: + * `_mlperf.option2` + - Environment variables: + - *CM_DATASET_CALIBRATION_OPTION*: `two` + - Workflow: + +
+ + + * Group "**dataset-type**" +
+ Click here to expand this section. + + * `_calibration` + - Environment variables: + - *CM_DATASET_TYPE*: `calibration` + - Workflow: + * **`_validation`** (default) + - Environment variables: + - *CM_DATASET_TYPE*: `validation` + - Workflow: + +
+ + + * Group "**extension**" +
+ Click here to expand this section. + + * `_rgb32` + - Environment variables: + - *CM_DATASET_PREPROCESSED_EXTENSION*: `rgb32` + - Workflow: + * `_rgb8` + - Environment variables: + - *CM_DATASET_PREPROCESSED_EXTENSION*: `rgb8` + - Workflow: + +
+ + + * Group "**interpolation-method**" +
+ Click here to expand this section. + + * `_inter.area` + - Environment variables: + - *CM_DATASET_INTERPOLATION_METHOD*: `INTER_AREA` + - Workflow: + * `_inter.linear` + - Environment variables: + - *CM_DATASET_INTERPOLATION_METHOD*: `INTER_LINEAR` + - Workflow: + +
+ + + * Group "**layout**" +
+ Click here to expand this section. + + * **`_NCHW`** (default) + - Environment variables: + - *CM_DATASET_DATA_LAYOUT*: `NCHW` + - Workflow: + * `_NHWC` + - Environment variables: + - *CM_DATASET_DATA_LAYOUT*: `NHWC` + - Workflow: + +
+ + + * Group "**model**" +
+ Click here to expand this section. + + * `_for.mobilenet` + - Workflow: + * `_for.resnet50` + - Environment variables: + - *CM_DATASET_SUBTRACT_MEANS*: `1` + - *CM_DATASET_GIVEN_CHANNEL_MEANS*: `123.68 116.78 103.94` + - *CM_DATASET_NORMALIZE_DATA*: `0` + - *CM_DATASET_INTERPOLATION_METHOD*: `INTER_AREA` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * `_float32` + - Environment variables: + - *CM_DATASET_DATA_TYPE*: `float32` + - *CM_DATASET_QUANTIZE*: `0` + - *CM_DATASET_CONVERT_TO_UNSIGNED*: `0` + - Workflow: + * `_int8` + - Environment variables: + - *CM_DATASET_DATA_TYPE*: `int8` + - *CM_DATASET_QUANTIZE*: `1` + - *CM_DATASET_CONVERT_TO_UNSIGNED*: `0` + - Workflow: + * `_uint8` + - Environment variables: + - *CM_DATASET_DATA_TYPE*: `uint8` + - *CM_DATASET_DATA_TYPE_INPUT*: `float32` + - *CM_DATASET_QUANTIZE*: `1` + - *CM_DATASET_CONVERT_TO_UNSIGNED*: `1` + - Workflow: + +
+ + + * Group "**preprocessing-source**" +
+ Click here to expand this section. + + * `_generic-preprocessor` + - Environment variables: + - *CM_DATASET_REFERENCE_PREPROCESSOR*: `0` + - Workflow: + 1. ***Read "prehook_deps" on other CM scripts*** + * get,generic,image-preprocessor + - CM script: [get-preprocesser-script-generic](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocesser-script-generic) + * **`_mlcommons-reference-preprocessor`** (default) + - Environment variables: + - *CM_DATASET_REFERENCE_PREPROCESSOR*: `1` + - Workflow: + +
+ + + * Group "**resolution**" +
+ Click here to expand this section. + + * `_resolution.#` + - Environment variables: + - *CM_DATASET_INPUT_SQUARE_SIDE*: `#` + - Workflow: + * **`_resolution.224`** (default) + - Environment variables: + - *CM_DATASET_INPUT_SQUARE_SIDE*: `224` + - Workflow: + +
+ + + * Group "**size**" +
+ Click here to expand this section. + + * `_1` + - Environment variables: + - *CM_DATASET_SIZE*: `1` + - Workflow: + * `_500` + - Environment variables: + - *CM_DATASET_SIZE*: `500` + - Workflow: + * `_full` + - Environment variables: + - *CM_DATASET_SIZE*: `50000` + - Workflow: + * `_size.#` + - Environment variables: + - *CM_DATASET_SIZE*: `#` + - Workflow: + +
+ + +#### Default variations + +`_NCHW,_mlcommons-reference-preprocessor,_resolution.224,_validation` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--dir=value` → `CM_DATASET_PREPROCESSED_PATH=value` +* `--imagenet_path=value` → `CM_IMAGENET_PATH=value` +* `--imagenet_preprocessed_path=value` → `CM_IMAGENET_PREPROCESSED_PATH=value` +* `--threads=value` → `CM_NUM_PREPROCESS_THREADS=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "dir":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_DATASET_CROP_FACTOR: `87.5` +* CM_DATASET_DATA_TYPE: `float32` +* CM_DATASET_DATA_LAYOUT: `NCHW` +* CM_DATASET_QUANT_SCALE: `1` +* CM_DATASET_QUANTIZE: `0` +* CM_DATASET_QUANT_OFFSET: `0` +* CM_DATASET_PREPROCESSED_EXTENSION: `npy` +* CM_DATASET_CONVERT_TO_UNSIGNED: `0` +* CM_DATASET_REFERENCE_PREPROCESSOR: `1` +* CM_PREPROCESS_VGG: `yes` +* CM_MODEL: `resnet50` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-imagenet/_cm.json)*** + * get,python3 + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_IMAGENET_PREPROCESSED_PATH': ['on']}` + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,dataset,image-classification,original + * Skip this dependency only if all ENV vars are set:
+`{'CM_IMAGENET_PREPROCESSED_PATH': ['on']}` + * CM names: `--adr.['original-dataset']...` + - CM script: [get-dataset-imagenet-val](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-val) + * get,dataset-aux,image-classification,imagenet-aux + * Enable this dependency only if all ENV vars are set:
+`{'CM_DATASET_TYPE': ['validation']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_IMAGENET_PREPROCESSED_PATH': ['on']}` + - CM script: [get-dataset-imagenet-aux](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-aux) + * get,dataset,imagenet,calibration + * Enable this dependency only if all ENV vars are set:
+`{'CM_DATASET_TYPE': ['calibration']}` + - CM script: [get-dataset-imagenet-calibration](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-calibration) + * get,generic-python-lib,_package.opencv-python-headless + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_pillow + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * mlperf,mlcommons,inference,source,src + * Enable this dependency only if all ENV vars are set:
+`{'CM_DATASET_REFERENCE_PREPROCESSOR': ['1']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_IMAGENET_PREPROCESSED_PATH': ['on']}` + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-imagenet/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-imagenet/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-imagenet/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-imagenet/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-imagenet/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-imagenet/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-imagenet/_cm.json) + +___ +### Script output +`cmr "get dataset imagenet ILSVRC image-classification preprocessed [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_DATASET_*` +#### New environment keys auto-detected from customize + +* `CM_DATASET_DATA_TYPE_INPUT` +* `CM_DATASET_IMAGES_LIST` +* `CM_DATASET_PREPROCESSED_IMAGENAMES_LIST` +* `CM_DATASET_PREPROCESSED_IMAGES_LIST` +* `CM_DATASET_PREPROCESSED_PATH` +* `CM_DATASET_SIZE` +* `CM_DATASET_TYPE` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-preprocessed-dataset-kits19.md b/docs/AI-ML-datasets/get-preprocessed-dataset-kits19.md new file mode 100644 index 0000000000..35e4a05b20 --- /dev/null +++ b/docs/AI-ML-datasets/get-preprocessed-dataset-kits19.md @@ -0,0 +1,232 @@ +Automatically generated README for this automation recipe: **get-preprocessed-dataset-kits19** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-preprocessed-dataset-kits19,2094d9b9ab6c4c9e) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-kits19)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,medical-imaging,kits19,preprocessed* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset medical-imaging kits19 preprocessed" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,medical-imaging,kits19,preprocessed` + +`cm run script --tags=get,dataset,medical-imaging,kits19,preprocessed[,variations] [--input_flags]` + +*or* + +`cmr "get dataset medical-imaging kits19 preprocessed"` + +`cmr "get dataset medical-imaging kits19 preprocessed [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,dataset,medical-imaging,kits19,preprocessed', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,medical-imaging,kits19,preprocessed"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset medical-imaging kits19 preprocessed[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_nvidia` + - Environment variables: + - *CM_PREPROCESSING_BY_NVIDIA*: `yes` + - Workflow: + +
+ + + * Group "**dataset-count**" +
+ Click here to expand this section. + + * `_1` + - Environment variables: + - *CM_DATASET_SIZE*: `1` + - Workflow: + * `_5` + - Environment variables: + - *CM_DATASET_SIZE*: `5` + - Workflow: + * `_50` + - Environment variables: + - *CM_DATASET_SIZE*: `50` + - Workflow: + * `_500` + - Environment variables: + - *CM_DATASET_SIZE*: `500` + - Workflow: + * `_full` + - Environment variables: + - *CM_DATASET_SIZE*: `` + - Workflow: + +
+ + + * Group "**dataset-precision**" +
+ Click here to expand this section. + + * **`_fp32`** (default) + - Environment variables: + - *CM_DATASET_DTYPE*: `fp32` + - Workflow: + * `_int8` + - Environment variables: + - *CM_DATASET_DTYPE*: `int8` + - Workflow: + +
+ + + * Group "**dataset-type**" +
+ Click here to expand this section. + + * `_calibration` + - Environment variables: + - *CM_DATASET_PATH*: `<<>>` + - Workflow: + * **`_validation`** (default) + - Workflow: + +
+ + +#### Default variations + +`_fp32,_validation` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--dir=value` → `CM_DATASET_PREPROCESSED_PATH=value` +* `--threads=value` → `CM_NUM_PREPROCESS_THREADS=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "dir":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_DATASET: `kits19` +* CM_DATASET_DTYPE: `fp32` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-kits19/_cm.json)*** + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,dataset,medical-imaging,kits19,original + * CM names: `--adr.['original-dataset']...` + - CM script: [get-dataset-kits19](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-kits19) + * mlperf,mlcommons,inference,source,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,generic-python-lib,_scipy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_nibabel + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_numpy + * CM names: `--adr.['numpy']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-kits19/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-kits19/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-kits19/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-kits19/_cm.json) + 1. 
***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-kits19/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-kits19/_cm.json) + +___ +### Script output +`cmr "get dataset medical-imaging kits19 preprocessed [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_DATASET_*` +#### New environment keys auto-detected from customize + +* `CM_DATASET_PREPROCESSED_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-preprocessed-dataset-librispeech.md b/docs/AI-ML-datasets/get-preprocessed-dataset-librispeech.md new file mode 100644 index 0000000000..875bcf494d --- /dev/null +++ b/docs/AI-ML-datasets/get-preprocessed-dataset-librispeech.md @@ -0,0 +1,222 @@ +Automatically generated README for this automation recipe: **get-preprocessed-dataset-librispeech** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-preprocessed-dataset-librispeech,e9f62fc969d5483a) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-librispeech)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,speech-recognition,librispeech,preprocessed* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset speech-recognition librispeech preprocessed" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,speech-recognition,librispeech,preprocessed` + +`cm run script --tags=get,dataset,speech-recognition,librispeech,preprocessed[,variations] [--input_flags]` + +*or* + +`cmr "get dataset speech-recognition librispeech preprocessed"` + +`cmr "get dataset speech-recognition librispeech preprocessed [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,dataset,speech-recognition,librispeech,preprocessed', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,speech-recognition,librispeech,preprocessed"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset speech-recognition librispeech preprocessed[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * Group "**dataset-count**" +
+ Click here to expand this section. + + * `_1` + - Environment variables: + - *CM_DATASET_SIZE*: `1` + - Workflow: + * `_5` + - Environment variables: + - *CM_DATASET_SIZE*: `5` + - Workflow: + * `_50` + - Environment variables: + - *CM_DATASET_SIZE*: `50` + - Workflow: + * `_500` + - Environment variables: + - *CM_DATASET_SIZE*: `500` + - Workflow: + * `_full` + - Environment variables: + - *CM_DATASET_SIZE*: `` + - Workflow: + +
+ + + * Group "**dataset-precision**" +
+ Click here to expand this section. + + * **`_fp32`** (default) + - Environment variables: + - *CM_DATASET_DTYPE*: `fp32` + - Workflow: + * `_int8` + - Environment variables: + - *CM_DATASET_DTYPE*: `int8` + - Workflow: + +
+ + + * Group "**dataset-type**" +
+ Click here to expand this section. + + * `_calibration` + - Environment variables: + - *CM_DATASET_PATH*: `<<>>` + - Workflow: + * **`_validation`** (default) + - Workflow: + +
+ + +#### Default variations + +`_fp32,_validation` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--dir=value` → `CM_DATASET_PREPROCESSED_PATH=value` +* `--threads=value` → `CM_NUM_PREPROCESS_THREADS=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "dir":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_DATASET: `kits19` +* CM_DATASET_DTYPE: `fp32` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-librispeech/_cm.json)*** + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,dataset,speech-recognition,librispeech,original + * CM names: `--adr.['original-dataset']...` + - CM script: [get-dataset-librispeech](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-librispeech) + * mlperf,mlcommons,inference,source,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,generic-python-lib,_sox + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_pandas + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_tqdm + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,sys-util,generic,_sox + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-librispeech/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-librispeech/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-librispeech/run.sh) + 1. 
Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-librispeech/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-librispeech/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-librispeech/_cm.json) + +___ +### Script output +`cmr "get dataset speech-recognition librispeech preprocessed [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_DATASET_*` +#### New environment keys auto-detected from customize + +* `CM_DATASET_PREPROCESSED_JSON` +* `CM_DATASET_PREPROCESSED_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-preprocessed-dataset-openimages.md b/docs/AI-ML-datasets/get-preprocessed-dataset-openimages.md new file mode 100644 index 0000000000..84ee7e5341 --- /dev/null +++ b/docs/AI-ML-datasets/get-preprocessed-dataset-openimages.md @@ -0,0 +1,401 @@ +Automatically generated README for this automation recipe: **get-preprocessed-dataset-openimages** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-preprocessed-dataset-openimages,9842f1be8cba4c7b) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openimages)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script 
(see in above meta description): *get,dataset,openimages,open-images,object-detection,preprocessed* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset openimages open-images object-detection preprocessed" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,openimages,open-images,object-detection,preprocessed` + +`cm run script --tags=get,dataset,openimages,open-images,object-detection,preprocessed[,variations] [--input_flags]` + +*or* + +`cmr "get dataset openimages open-images object-detection preprocessed"` + +`cmr "get dataset openimages open-images object-detection preprocessed [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,dataset,openimages,open-images,object-detection,preprocessed', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,openimages,open-images,object-detection,preprocessed"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset openimages open-images object-detection preprocessed[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_filter` + - Workflow: + * `_filter,calibration` + - Environment variables: + - *CM_DATASET_CALIBRATION_FILTER*: `yes` + - Workflow: + * `_for.retinanet.onnx` + - Environment variables: + - *CM_ML_MODEL_NAME*: `retinanet` + - *CM_DATASET_SUBTRACT_MEANS*: `1` + - *CM_DATASET_GIVEN_CHANNEL_MEANS*: `0.485 0.456 0.406` + - *CM_DATASET_GIVEN_CHANNEL_STDS*: `0.229 0.224 0.225` + - *CM_DATASET_NORMALIZE_DATA*: `0` + - *CM_DATASET_NORMALIZE_LOWER*: `0.0` + - *CM_DATASET_NORMALIZE_UPPER*: `1.0` + - *CM_DATASET_CONVERT_TO_BGR*: `0` + - *CM_DATASET_CROP_FACTOR*: `100.0` + - Workflow: + * `_for.retinanet.onnx,fp32` + - Workflow: + * `_for.retinanet.onnx,uint8` + - Environment variables: + - *CM_DATASET_QUANT_SCALE*: `0.0186584499` + - *CM_DATASET_QUANT_OFFSET*: `114` + - Workflow: + * `_full,validation` + - Environment variables: + - *CM_DATASET_SIZE*: `24781` + - Workflow: + * `_nvidia` + - Environment variables: + - *CM_PREPROCESSING_BY_NVIDIA*: `yes` + - Workflow: + * `_quant-offset.#` + - Workflow: + * `_quant-scale.#` + - Workflow: + +
+ + + * Group "**annotations**" +
+ Click here to expand this section. + + * `_custom-annotations` + - Workflow: + * **`_default-annotations`** (default) + - Workflow: + +
+ + + * Group "**dataset-count**" +
+ Click here to expand this section. + + * **`_50`** (default) + - Environment variables: + - *CM_DATASET_SIZE*: `50` + - Workflow: + * `_500` + - Environment variables: + - *CM_DATASET_SIZE*: `500` + - Workflow: + * `_full` + - Workflow: + * `_size.#` + - Environment variables: + - *CM_DATASET_SIZE*: `#` + - Workflow: + +
+ + + * Group "**dataset-layout**" +
+ Click here to expand this section. + + * **`_NCHW`** (default) + - Environment variables: + - *CM_DATASET_DATA_LAYOUT*: `NCHW` + - Workflow: + * `_NHWC` + - Environment variables: + - *CM_DATASET_DATA_LAYOUT*: `NHWC` + - Workflow: + +
+ + + * Group "**dataset-precision**" +
+ Click here to expand this section. + + * **`_fp32`** (default) + - Environment variables: + - *CM_DATASET_DTYPE*: `fp32` + - *CM_DATASET_INPUT_DTYPE*: `fp32` + - *CM_DATASET_QUANTIZE*: `0` + - *CM_DATASET_CONVERT_TO_UNSIGNED*: `0` + - Workflow: + * `_int8` + - Environment variables: + - *CM_DATASET_DTYPE*: `int8` + - *CM_DATASET_INPUT_DTYPE*: `fp32` + - *CM_DATASET_QUANTIZE*: `1` + - *CM_DATASET_CONVERT_TO_UNSIGNED*: `0` + - Workflow: + * `_uint8` + - Environment variables: + - *CM_DATASET_DTYPE*: `uint8` + - *CM_DATASET_INPUT_DTYPE*: `fp32` + - *CM_DATASET_QUANTIZE*: `1` + - *CM_DATASET_CONVERT_TO_UNSIGNED*: `1` + - Workflow: + +
+ + + * Group "**dataset-type**" +
+ Click here to expand this section. + + * `_calibration` + - Environment variables: + - *CM_DATASET_PATH*: `<<>>` + - *CM_DATASET_ANNOTATIONS_FILE_PATH*: `<<>>` + - *CM_DATASET_TYPE*: `calibration` + - Workflow: + * **`_validation`** (default) + - Environment variables: + - *CM_DATASET_TYPE*: `validation` + - Workflow: + +
+ + + * Group "**extension**" +
+ Click here to expand this section. + + * `_npy` + - Environment variables: + - *CM_DATASET_PREPROCESSED_EXTENSION*: `npy` + - Workflow: + * `_raw` + - Environment variables: + - *CM_DATASET_PREPROCESSED_EXTENSION*: `raw` + - Workflow: + * `_rgb32` + - Environment variables: + - *CM_DATASET_PREPROCESSED_EXTENSION*: `rgb32` + - Workflow: + * `_rgb8` + - Environment variables: + - *CM_DATASET_PREPROCESSED_EXTENSION*: `rgb8` + - Workflow: + +
+ + + * Group "**filter-size**" +
+ Click here to expand this section. + + * `_filter-size.#` + - Workflow: + +
+ + + * Group "**interpolation-method**" +
+ Click here to expand this section. + + * `_inter.area` + - Environment variables: + - *CM_DATASET_INTERPOLATION_METHOD*: `INTER_AREA` + - Workflow: + * `_inter.linear` + - Environment variables: + - *CM_DATASET_INTERPOLATION_METHOD*: `INTER_LINEAR` + - Workflow: + +
+ + + * Group "**preprocessing-source**" +
+ Click here to expand this section. + + * `_generic-preprocessor` + - Environment variables: + - *CM_DATASET_REFERENCE_PREPROCESSOR*: `0` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_torch + * CM names: `--adr.['torch', 'pytorch']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torchvision + * CM names: `--adr.['torchvision']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Read "prehook_deps" on other CM scripts*** + * get,generic,image-preprocessor + - CM script: [get-preprocesser-script-generic](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocesser-script-generic) + * **`_mlcommons-reference-preprocessor`** (default) + - Environment variables: + - *CM_DATASET_REFERENCE_PREPROCESSOR*: `1` + - Workflow: + +
+ + +#### Default variations + +`_50,_NCHW,_default-annotations,_fp32,_mlcommons-reference-preprocessor,_validation` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--dir=value` → `CM_DATASET_PREPROCESSED_PATH=value` +* `--threads=value` → `CM_NUM_PREPROCESS_THREADS=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "dir":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_DATASET: `OPENIMAGES` +* CM_DATASET_DTYPE: `fp32` +* CM_DATASET_INPUT_SQUARE_SIDE: `800` +* CM_DATASET_CROP_FACTOR: `100.0` +* CM_DATASET_QUANT_SCALE: `1` +* CM_DATASET_QUANTIZE: `0` +* CM_DATASET_QUANT_OFFSET: `0` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openimages/_cm.json)*** + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,dataset,object-detection,openimages,original + * CM names: `--adr.['original-dataset']...` + - CM script: [get-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages) + * mlperf,mlcommons,inference,source,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,generic-python-lib,_pycocotools + * CM names: `--adr.['pycocotools']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_opencv-python + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_pillow + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.ujson + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_numpy + * CM names: `--adr.['numpy']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_numpy + * CM names: `--adr.['numpy']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. 
***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openimages/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openimages/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openimages/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openimages/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openimages/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openimages/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openimages/_cm.json) + +___ +### Script output +`cmr "get dataset openimages open-images object-detection preprocessed [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_DATASET_*` +#### New environment keys auto-detected from customize + +* `CM_DATASET_ANNOTATIONS_DIR_PATH` +* `CM_DATASET_ANNOTATIONS_FILE_PATH` +* `CM_DATASET_PREPROCESSED_IMAGENAMES_LIST` +* `CM_DATASET_PREPROCESSED_IMAGES_LIST` +* `CM_DATASET_PREPROCESSED_PATH` +* `CM_DATASET_QUANT_OFFSET` +* `CM_DATASET_QUANT_SCALE` +* `CM_DATASET_TYPE` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-preprocessed-dataset-openorca.md b/docs/AI-ML-datasets/get-preprocessed-dataset-openorca.md new file mode 100644 index 0000000000..cd4e07dd97 --- /dev/null +++ b/docs/AI-ML-datasets/get-preprocessed-dataset-openorca.md @@ -0,0 +1,178 @@ +Automatically generated README for this automation recipe: **get-preprocessed-dataset-openorca** + +Category: **AI/ML 
datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-preprocessed-dataset-openorca,5614c39cb1564d72) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openorca)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,openorca,language-processing,preprocessed* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset openorca language-processing preprocessed" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,openorca,language-processing,preprocessed` + +`cm run script --tags=get,dataset,openorca,language-processing,preprocessed[,variations] ` + +*or* + +`cmr "get dataset openorca language-processing preprocessed"` + +`cmr "get dataset openorca language-processing preprocessed [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started 
Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,dataset,openorca,language-processing,preprocessed', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,openorca,language-processing,preprocessed"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset openorca language-processing preprocessed[variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**dataset-type**" +
+ Click here to expand this section. + + * `_calibration` + - Environment variables: + - *CM_DATASET_CALIBRATION*: `yes` + - Workflow: + * **`_validation`** (default) + - Environment variables: + - *CM_DATASET_CALIBRATION*: `no` + - Workflow: + +
+ + + * Group "**size**" +
+ Click here to expand this section. + + * **`_60`** (default) + - Workflow: + * `_full` + - Workflow: + * `_size.#` + - Workflow: + +
+ + +#### Default variations + +`_60,_validation` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_DATASET_CALIBRATION: `no` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openorca/_cm.json)*** + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,dataset,original,openorca + * CM names: `--adr.['openorca-original', 'dataset-original']...` + - CM script: [get-dataset-openorca](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openorca) + * mlperf,inference,source + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,generic-python-lib,_package.pyarrow + * CM names: `--adr.['pyarrow']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.fastparquet + * CM names: `--adr.['fastparquet']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,ml-model,llama2 + - CM script: [get-ml-model-llama2](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-llama2) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openorca/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openorca/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openorca/run.sh) + 1. 
Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openorca/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openorca/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openorca/_cm.json) + +___ +### Script output +`cmr "get dataset openorca language-processing preprocessed [,variations]" -j` +#### New environment keys (filter) + +* `CM_DATASET_PREPROCESSED_PATH` +#### New environment keys auto-detected from customize + +* `CM_DATASET_PREPROCESSED_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-preprocessed-dataset-squad.md b/docs/AI-ML-datasets/get-preprocessed-dataset-squad.md new file mode 100644 index 0000000000..c7d80cfd0a --- /dev/null +++ b/docs/AI-ML-datasets/get-preprocessed-dataset-squad.md @@ -0,0 +1,238 @@ +Automatically generated README for this automation recipe: **get-preprocessed-dataset-squad** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-preprocessed-dataset-squad,7cd1d9b7e8af4788) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-squad)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,preprocessed,tokenized,squad* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset preprocessed tokenized squad" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,preprocessed,tokenized,squad` + +`cm run script --tags=get,dataset,preprocessed,tokenized,squad[,variations] ` + +*or* + +`cmr "get dataset preprocessed tokenized squad"` + +`cmr "get dataset preprocessed tokenized squad [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,dataset,preprocessed,tokenized,squad', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,preprocessed,tokenized,squad"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset preprocessed tokenized squad[variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**calibration-set**" +
+ Click here to expand this section. + + * `_calib1` + - Environment variables: + - *CM_DATASET_SQUAD_CALIBRATION_SET*: `one` + - Workflow: + * `_calib2` + - Environment variables: + - *CM_DATASET_SQUAD_CALIBRATION_SET*: `two` + - Workflow: + * **`_no-calib`** (default) + - Environment variables: + - *CM_DATASET_SQUAD_CALIBRATION_SET*: `` + - Workflow: + +
+ + + * Group "**doc-stride**" +
+ Click here to expand this section. + + * `_doc-stride.#` + - Environment variables: + - *CM_DATASET_DOC_STRIDE*: `#` + - Workflow: + * **`_doc-stride.128`** (default) + - Environment variables: + - *CM_DATASET_DOC_STRIDE*: `128` + - Workflow: + +
+ + + * Group "**packing**" +
+ Click here to expand this section. + + * `_packed` + - Environment variables: + - *CM_DATASET_SQUAD_PACKED*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,preprocessed,squad,_pickle + - CM script: [get-preprocessed-dataset-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-squad) + +
+ + + * Group "**raw**" +
+ Click here to expand this section. + + * `_pickle` + - Environment variables: + - *CM_DATASET_RAW*: `no` + - Workflow: + * **`_raw`** (default) + - Environment variables: + - *CM_DATASET_RAW*: `yes` + - Workflow: + +
+ + + * Group "**seq-length**" +
+ Click here to expand this section. + + * `_seq-length.#` + - Environment variables: + - *CM_DATASET_MAX_SEQ_LENGTH*: `#` + - Workflow: + * **`_seq-length.384`** (default) + - Environment variables: + - *CM_DATASET_MAX_SEQ_LENGTH*: `384` + - Workflow: + +
+ + +#### Default variations + +`_doc-stride.128,_no-calib,_raw,_seq-length.384` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-squad/_cm.yaml)*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,mlperf,inference,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,squad,dataset,original + * CM names: `--adr.['squad-dataset']...` + - CM script: [get-dataset-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad) + * get,squad,vocab + * CM names: `--adr.['squad-vocab']...` + - CM script: [get-bert-squad-vocab](https://github.com/mlcommons/cm4mlops/tree/master/script/get-bert-squad-vocab) + * get,generic-python-lib,_package.tokenization + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.transformers + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.tensorflow + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-squad/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-squad/_cm.yaml) + 1. 
***Run native script if exists*** + * [run-packed.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-squad/run-packed.sh) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-squad/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-squad/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-squad/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-squad/_cm.yaml) + +___ +### Script output +`cmr "get dataset preprocessed tokenized squad [,variations]" -j` +#### New environment keys (filter) + +* `CM_DATASET_SQUAD_TOKENIZED_*` +#### New environment keys auto-detected from customize + +* `CM_DATASET_SQUAD_TOKENIZED_DOC_STRIDE` +* `CM_DATASET_SQUAD_TOKENIZED_INPUT_IDS` +* `CM_DATASET_SQUAD_TOKENIZED_INPUT_MASK` +* `CM_DATASET_SQUAD_TOKENIZED_MAX_QUERY_LENGTH` +* `CM_DATASET_SQUAD_TOKENIZED_MAX_SEQ_LENGTH` +* `CM_DATASET_SQUAD_TOKENIZED_PACKED_FILENAMES_FILE` +* `CM_DATASET_SQUAD_TOKENIZED_PICKLE_FILE` +* `CM_DATASET_SQUAD_TOKENIZED_ROOT` +* `CM_DATASET_SQUAD_TOKENIZED_SEGMENT_IDS` \ No newline at end of file diff --git a/docs/AI-ML-frameworks/get-google-saxml.md b/docs/AI-ML-frameworks/get-google-saxml.md new file mode 100644 index 0000000000..5a7e3d351d --- /dev/null +++ b/docs/AI-ML-frameworks/get-google-saxml.md @@ -0,0 +1,133 @@ +Automatically generated README for this automation recipe: **get-google-saxml** + +Category: **AI/ML frameworks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=get-google-saxml,5d7b17d84b5a48fb) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-saxml)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *get,google,saxml* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get google saxml" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,google,saxml` + +`cm run script --tags=get,google,saxml ` + +*or* + +`cmr "get google saxml"` + +`cmr "get google saxml " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,google,saxml', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,google,saxml"``` + +#### Run this script via Docker (beta) + +`cm docker script "get google saxml" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `master` + +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-saxml/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,git,_repo.https://github.com/google/saxml + * CM names: `--adr.['google-saxml-git-src']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + * get,bazel + * CM names: `--adr.['bazel']...` + - CM script: [get-bazel](https://github.com/mlcommons/cm4mlops/tree/master/script/get-bazel) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-saxml/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-saxml/_cm.yaml) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-saxml/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-saxml/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-saxml/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-saxml/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-saxml/_cm.yaml) + +___ +### Script output +`cmr "get google saxml " -j` +#### New environment keys (filter) + +* `CM_GOOGLE_SAXML*` +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-frameworks/get-onnxruntime-prebuilt.md b/docs/AI-ML-frameworks/get-onnxruntime-prebuilt.md new file mode 100644 index 0000000000..20419da084 --- /dev/null +++ b/docs/AI-ML-frameworks/get-onnxruntime-prebuilt.md @@ -0,0 +1,157 @@ +Automatically generated README for this automation recipe: **get-onnxruntime-prebuilt** + +Category: **AI/ML frameworks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-onnxruntime-prebuilt,be02c84ff57c4244) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-onnxruntime-prebuilt)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,onnxruntime,get,prebuilt,lib,lang-c,lang-cpp* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install onnxruntime get prebuilt lib lang-c lang-cpp" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,onnxruntime,get,prebuilt,lib,lang-c,lang-cpp` + +`cm run script --tags=install,onnxruntime,get,prebuilt,lib,lang-c,lang-cpp[,variations] ` + +*or* + +`cmr "install onnxruntime get prebuilt lib lang-c lang-cpp"` + +`cmr "install onnxruntime get prebuilt lib lang-c lang-cpp [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'install,onnxruntime,get,prebuilt,lib,lang-c,lang-cpp', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,onnxruntime,get,prebuilt,lib,lang-c,lang-cpp"``` + +#### Run this script via Docker (beta) + +`cm docker script "install onnxruntime get prebuilt lib lang-c lang-cpp[variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**device**" +
+ Click here to expand this section. + + * **`_cpu`** (default) + - Environment variables: + - *CM_ONNXRUNTIME_DEVICE*: `` + - Workflow: + * `_cuda` + - Environment variables: + - *CM_ONNXRUNTIME_DEVICE*: `gpu` + - Workflow: + +
+ + +#### Default variations + +`_cpu` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `1.16.3` + +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-onnxruntime-prebuilt/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-onnxruntime-prebuilt/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-onnxruntime-prebuilt/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-onnxruntime-prebuilt/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-onnxruntime-prebuilt/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-onnxruntime-prebuilt/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-onnxruntime-prebuilt/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-onnxruntime-prebuilt/_cm.json) + +___ +### Script output +`cmr "install onnxruntime get prebuilt lib lang-c lang-cpp [,variations]" -j` +#### New environment keys (filter) + +* `+CPLUS_INCLUDE_PATH` +* `+C_INCLUDE_PATH` +* `+DYLD_FALLBACK_LIBRARY_PATH` +* `+LD_LIBRARY_PATH` +* `+PATH` +* `CM_ONNXRUNTIME_INCLUDE_PATH` +* `CM_ONNXRUNTIME_LIB_PATH` +#### New environment keys auto-detected from customize + +* `CM_ONNXRUNTIME_INCLUDE_PATH` +* `CM_ONNXRUNTIME_LIB_PATH` \ No newline at end of file diff --git a/docs/AI-ML-frameworks/get-qaic-apps-sdk.md b/docs/AI-ML-frameworks/get-qaic-apps-sdk.md new file mode 100644 index 0000000000..8365953960 --- /dev/null +++ b/docs/AI-ML-frameworks/get-qaic-apps-sdk.md @@ -0,0 +1,124 @@ +Automatically generated README for this automation recipe: **get-qaic-apps-sdk** + +Category: **AI/ML frameworks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-qaic-apps-sdk,0a9e206af6764da9) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-apps-sdk)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,detect,qaic,apps,sdk,apps-sdk,qaic-apps-sdk* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get detect qaic apps sdk apps-sdk qaic-apps-sdk" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,detect,qaic,apps,sdk,apps-sdk,qaic-apps-sdk` + +`cm run script --tags=get,detect,qaic,apps,sdk,apps-sdk,qaic-apps-sdk ` + +*or* + +`cmr "get detect qaic apps sdk apps-sdk qaic-apps-sdk"` + +`cmr "get detect qaic apps sdk apps-sdk qaic-apps-sdk " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,detect,qaic,apps,sdk,apps-sdk,qaic-apps-sdk', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,detect,qaic,apps,sdk,apps-sdk,qaic-apps-sdk"``` + +#### Run this script via Docker (beta) + +`cm docker script "get detect qaic apps sdk apps-sdk qaic-apps-sdk" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-apps-sdk/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-apps-sdk/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-apps-sdk/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-apps-sdk/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-apps-sdk/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-apps-sdk/_cm.json) + +___ +### Script output +`cmr "get detect qaic apps sdk apps-sdk qaic-apps-sdk " -j` +#### New environment keys (filter) + +* `+CPLUS_INCLUDE_PATH` +* `+C_INCLUDE_PATH` +* `+DYLD_FALLBACK_LIBRARY_PATH` +* `+LD_LIBRARY_PATH` +* `+PATH` +* `CM_QAIC_EXEC_PATH` +#### New environment keys auto-detected from customize + +* `CM_QAIC_EXEC_PATH` \ No newline at end of file diff --git a/docs/AI-ML-frameworks/get-qaic-platform-sdk.md b/docs/AI-ML-frameworks/get-qaic-platform-sdk.md new file mode 100644 index 0000000000..f712c98591 --- /dev/null +++ b/docs/AI-ML-frameworks/get-qaic-platform-sdk.md @@ -0,0 +1,128 @@ +Automatically generated README for this automation recipe: **get-qaic-platform-sdk** + +Category: **AI/ML frameworks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-qaic-platform-sdk,a60f86918dc9457d) ]* + +--- +#### 
Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-platform-sdk)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,detect,qaic,platform,sdk,platform-sdk,qaic-platform-sdk* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get detect qaic platform sdk platform-sdk qaic-platform-sdk" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,detect,qaic,platform,sdk,platform-sdk,qaic-platform-sdk` + +`cm run script --tags=get,detect,qaic,platform,sdk,platform-sdk,qaic-platform-sdk ` + +*or* + +`cmr "get detect qaic platform sdk platform-sdk qaic-platform-sdk"` + +`cmr "get detect qaic platform sdk platform-sdk qaic-platform-sdk " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,detect,qaic,platform,sdk,platform-sdk,qaic-platform-sdk', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,detect,qaic,platform,sdk,platform-sdk,qaic-platform-sdk"``` + +#### Run this script via Docker (beta) + +`cm docker script "get detect qaic platform sdk platform-sdk qaic-platform-sdk" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-platform-sdk/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-platform-sdk/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-platform-sdk/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-platform-sdk/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-platform-sdk/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-platform-sdk/_cm.json) + +___ +### Script output +`cmr "get detect qaic platform sdk platform-sdk qaic-platform-sdk " -j` +#### New environment keys (filter) + +* `+CPLUS_INCLUDE_PATH` +* `+C_INCLUDE_PATH` +* `+DYLD_FALLBACK_LIBRARY_PATH` +* `+LD_LIBRARY_PATH` +* `+PATH` +* `CM_QAIC_RUNNER_PATH` +* `CM_QAIC_TOOLS_PATH` +#### New environment keys auto-detected from customize + +* `CM_QAIC_RUNNER_PATH` +* `CM_QAIC_TOOLS_PATH` \ No newline at end of file diff --git a/docs/AI-ML-frameworks/get-qaic-software-kit.md b/docs/AI-ML-frameworks/get-qaic-software-kit.md new file mode 100644 index 0000000000..62ab27a7c5 --- /dev/null +++ b/docs/AI-ML-frameworks/get-qaic-software-kit.md @@ -0,0 +1,176 @@ +Automatically generated README for this automation recipe: **get-qaic-software-kit** + +Category: **AI/ML frameworks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and 
Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-qaic-software-kit,3344655922694bbb) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-software-kit)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,qaic,software,kit,qaic-software-kit* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get qaic software kit qaic-software-kit" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,qaic,software,kit,qaic-software-kit` + +`cm run script --tags=get,qaic,software,kit,qaic-software-kit[,variations] ` + +*or* + +`cmr "get qaic software kit qaic-software-kit"` + +`cmr "get qaic software kit qaic-software-kit [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,qaic,software,kit,qaic-software-kit',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,qaic,software,kit,qaic-software-kit"``` + +#### Run this script via Docker (beta) + +`cm docker script "get qaic software kit qaic-software-kit[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_branch.#` + - Environment variables: + - *CM_GIT_CHECKOUT*: `#` + - Workflow: + +
+ + + * Group "**repo-source**" +
+ Click here to expand this section. + + * `_repo.#` + - Environment variables: + - *CM_GIT_URL*: `#` + - Workflow: + * **`_repo.quic`** (default) + - Environment variables: + - *CM_GIT_URL*: `https://github.com/quic/software-kit-for-qualcomm-cloud-ai-100` + - Workflow: + +
+ + +#### Default variations + +`_repo.quic` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-software-kit/_cm.json)*** + * get,git,repo + * CM names: `--adr.['qaic-software-git-repo']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + * get,generic,sys-util,_libudev-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_libpci-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,google,test + - CM script: [get-google-test](https://github.com/mlcommons/cm4mlops/tree/master/script/get-google-test) + * get,cmake + * CM names: `--adr.['cmake']...` + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + * get,compiler + * CM names: `--adr.['compiler']...` + - CM script: [get-cl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cl) + - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) + - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-software-kit/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-software-kit/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-software-kit/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-software-kit/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-software-kit/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-software-kit/_cm.json) + +___ +### Script output +`cmr "get qaic software kit qaic-software-kit [,variations]" -j` +#### New environment keys (filter) + +* `+PATH` +* `CM_QAIC_RUNNER_PATH` +* `CM_QAIC_SOFTWARE_KIT_PATH` +#### New environment keys auto-detected from customize + +* `CM_QAIC_RUNNER_PATH` +* `CM_QAIC_SOFTWARE_KIT_PATH` \ No newline at end of file diff --git a/docs/AI-ML-frameworks/get-rocm.md b/docs/AI-ML-frameworks/get-rocm.md new file mode 100644 index 0000000000..ed5e7b629c --- /dev/null +++ b/docs/AI-ML-frameworks/get-rocm.md @@ -0,0 +1,126 @@ +Automatically generated README for this automation recipe: **get-rocm** + +Category: **AI/ML frameworks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-rocm,23a69f9477cb4dab) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rocm)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,rocm,get-rocm* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get rocm get-rocm" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,rocm,get-rocm` + +`cm run script --tags=get,rocm,get-rocm ` + +*or* + +`cmr "get rocm get-rocm"` + +`cmr "get rocm get-rocm " ` + + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,rocm,get-rocm',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,rocm,get-rocm"``` + +#### Run this script via Docker (beta) + +`cm docker script "get rocm get-rocm" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rocm/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rocm/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rocm/_cm.json)*** + * install,rocm + * Enable this dependency only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + - CM script: [install-rocm](https://github.com/mlcommons/cm4mlops/tree/master/script/install-rocm) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rocm/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rocm/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rocm/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rocm/_cm.json) + +___ +### Script output +`cmr "get rocm get-rocm " -j` +#### New environment keys (filter) + +* `+PATH` +* `CM_ROCM_*` +#### New environment keys auto-detected from customize + +* `CM_ROCM_CACHE_TAGS` +* `CM_ROCM_INSTALLED_PATH` \ No newline at end of file diff --git a/docs/AI-ML-frameworks/get-tvm.md b/docs/AI-ML-frameworks/get-tvm.md new file mode 100644 index 0000000000..af40c0419a --- /dev/null +++ b/docs/AI-ML-frameworks/get-tvm.md @@ -0,0 +1,198 @@ +Automatically generated README for this automation recipe: **get-tvm** + +Category: **AI/ML frameworks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-tvm,93c89140e6224f4b) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,tvm,get-tvm* +* 
Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get tvm get-tvm" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,tvm,get-tvm` + +`cm run script --tags=get,tvm,get-tvm[,variations] ` + +*or* + +`cmr "get tvm get-tvm"` + +`cmr "get tvm get-tvm [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,tvm,get-tvm',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,tvm,get-tvm"``` + +#### Run this script via Docker (beta) + +`cm docker script "get tvm get-tvm[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_cuda` + - Environment variables: + - *CM_TVM_USE_CUDA*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,cuda + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * `_openmp` + - Environment variables: + - *CM_TVM_USE_OPENMP*: `yes` + - Workflow: + +
+ + + * Group "**installation-type**" +
+ Click here to expand this section. + + * **`_llvm`** (default) + - Environment variables: + - *CM_TVM_USE_LLVM*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,llvm + * CM names: `--adr.['llvm']...` + - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) + * `_pip-install` + - Environment variables: + - *CM_TVM_PIP_INSTALL*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_apache-tvm + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + +
+ + +#### Default variations + +`_llvm` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_GIT_CHECKOUT: `main` +* CM_GIT_URL: `https://github.com/apache/tvm` +* CM_TVM_PIP_INSTALL: `no` + +
+ +#### Versions +* `main` +* `v0.10.0` +* `v0.7.0` +* `v0.8.0` +* `v0.9.0` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm/_cm.json)*** + * cmake,get-cmake + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,generic-python-lib,_typing_extensions + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_decorator + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_scipy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_attrs + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_psutil + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm/_cm.json) + +___ +### Script output +`cmr "get tvm get-tvm [,variations]" -j` +#### New environment keys (filter) + +* `+DYLD_FALLBACK_LIBRARY_PATH` +* `+LD_LIBRARY_PATH` +* `+PYTHONPATH` +* `CM_TVM_*` +* `TVM_HOME` +#### New environment keys auto-detected from customize + +* `CM_TVM_PATH_INCLUDE` +* `CM_TVM_PATH_LIB` \ No newline at end of file diff --git a/docs/AI-ML-frameworks/install-qaic-compute-sdk-from-src.md b/docs/AI-ML-frameworks/install-qaic-compute-sdk-from-src.md new file mode 100644 index 0000000000..b8895826d5 --- /dev/null +++ b/docs/AI-ML-frameworks/install-qaic-compute-sdk-from-src.md @@ -0,0 +1,199 @@ +Automatically generated README for this automation recipe: **install-qaic-compute-sdk-from-src** + +Category: **AI/ML frameworks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-qaic-compute-sdk-from-src,9701bdda97fa4045) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-qaic-compute-sdk-from-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,qaic,from.src,software,compute,compute-sdk,qaic-compute-sdk,sdk* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get qaic from.src software compute compute-sdk qaic-compute-sdk sdk" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,qaic,from.src,software,compute,compute-sdk,qaic-compute-sdk,sdk` + +`cm run script --tags=get,qaic,from.src,software,compute,compute-sdk,qaic-compute-sdk,sdk[,variations] ` + +*or* + +`cmr "get qaic from.src software compute compute-sdk qaic-compute-sdk sdk"` + +`cmr "get qaic from.src software compute compute-sdk qaic-compute-sdk sdk [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,qaic,from.src,software,compute,compute-sdk,qaic-compute-sdk,sdk',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,qaic,from.src,software,compute,compute-sdk,qaic-compute-sdk,sdk"``` + +#### Run this script via Docker (beta) + +`cm docker script "get qaic from.src software compute compute-sdk qaic-compute-sdk sdk[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_branch.#` + - Environment variables: + - *CM_GIT_CHECKOUT*: `#` + - Workflow: + +
+ + + * Group "**installation-mode**" +
+ Click here to expand this section. + + * `_debug` + - Environment variables: + - *CM_QAIC_COMPUTE_SDK_INSTALL_MODE*: `debug` + - Workflow: + * **`_release`** (default) + - Environment variables: + - *CM_QAIC_COMPUTE_SDK_INSTALL_MODE*: `release` + - Workflow: + * `_release-assert` + - Environment variables: + - *CM_QAIC_COMPUTE_SDK_INSTALL_MODE*: `release-assert` + - Workflow: + +
+ + + * Group "**repo-source**" +
+ Click here to expand this section. + + * `_repo.#` + - Environment variables: + - *CM_GIT_URL*: `#` + - Workflow: + * **`_repo.quic`** (default) + - Environment variables: + - *CM_GIT_URL*: `https://github.com/quic/software-kit-for-qualcomm-cloud-ai-100-cc` + - Workflow: + +
+ + +#### Default variations + +`_release,_repo.quic` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-qaic-compute-sdk-from-src/_cm.json)*** + * get,git,repo,_repo.https://github.com/quic/software-kit-for-qualcomm-cloud-ai-100-cc + * CM names: `--adr.['qaic-software-git-repo']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + * get,cmake + * CM names: `--adr.['cmake']...` + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + * get,llvm,_from-src + * CM names: `--adr.['llvm']...` + - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) + * get,generic,sys-util,_libudev-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_libpci-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,google,test + - CM script: [get-google-test](https://github.com/mlcommons/cm4mlops/tree/master/script/get-google-test) + * get,generic-sys-util,_ninja-build + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic-sys-util,_rsync + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * download-and-extract,_extract,_url.https://codelinaro.jfrog.io/artifactory/codelinaro-toolchain-for-hexagon/v15.0.5/clang+llvm-15.0.5-cross-hexagon-unknown-linux-musl.tar.xz + * CM names: `--adr.['dae']...` + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-qaic-compute-sdk-from-src/customize.py)*** + 1. 
Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-qaic-compute-sdk-from-src/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-qaic-compute-sdk-from-src/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-qaic-compute-sdk-from-src/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-qaic-compute-sdk-from-src/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-qaic-compute-sdk-from-src/_cm.json) + +___ +### Script output +`cmr "get qaic from.src software compute compute-sdk qaic-compute-sdk sdk [,variations]" -j` +#### New environment keys (filter) + +* `+PATH` +* `CM_QAIC_COMPUTE_SDK_PATH` +#### New environment keys auto-detected from customize + +* `CM_QAIC_COMPUTE_SDK_PATH` \ No newline at end of file diff --git a/docs/AI-ML-frameworks/install-rocm.md b/docs/AI-ML-frameworks/install-rocm.md new file mode 100644 index 0000000000..019cd2cd63 --- /dev/null +++ b/docs/AI-ML-frameworks/install-rocm.md @@ -0,0 +1,129 @@ +Automatically generated README for this automation recipe: **install-rocm** + +Category: **AI/ML frameworks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-rocm,9d13f90463ce4545) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-rocm)* +* CM meta description for this 
script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,rocm,install-rocm* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install rocm install-rocm" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,rocm,install-rocm` + +`cm run script --tags=install,rocm,install-rocm ` + +*or* + +`cmr "install rocm install-rocm"` + +`cmr "install rocm install-rocm " ` + + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'install,rocm,install-rocm',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,rocm,install-rocm"``` + +#### Run this script via Docker (beta) + +`cm docker script "install rocm install-rocm" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `5.7.1` + +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-rocm/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-rocm/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-rocm/_cm.json) + 1. ***Run native script if exists*** + * [run-rhel.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-rocm/run-rhel.sh) + * [run-ubuntu.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-rocm/run-ubuntu.sh) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-rocm/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-rocm/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-rocm/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-rocm/_cm.json) + +___ +### Script output +`cmr "install rocm install-rocm " -j` +#### New environment keys (filter) + +* `+PATH` +* `CM_ROCM_*` +#### New environment keys auto-detected from customize + +* `CM_ROCM_BIN_WITH_PATH` +* `CM_ROCM_INSTALLED_PATH` \ No newline at end of file diff --git a/docs/AI-ML-frameworks/install-tensorflow-for-c.md b/docs/AI-ML-frameworks/install-tensorflow-for-c.md new file mode 100644 index 0000000000..845aae4516 --- /dev/null +++ b/docs/AI-ML-frameworks/install-tensorflow-for-c.md @@ -0,0 +1,122 @@ +Automatically generated README for this automation recipe: **install-tensorflow-for-c** + +Category: **AI/ML frameworks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-tensorflow-for-c,d73783d8302547d7) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-for-c)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,tensorflow,lib,lang-c* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install tensorflow lib lang-c" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,tensorflow,lib,lang-c` + +`cm run script --tags=install,tensorflow,lib,lang-c ` + +*or* + +`cmr "install tensorflow lib lang-c"` + +`cmr "install tensorflow lib lang-c " ` + + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'install,tensorflow,lib,lang-c',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,tensorflow,lib,lang-c"``` + +#### Run this script via Docker (beta) + +`cm docker script "install tensorflow lib lang-c" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `2.8.0` + +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-for-c/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-for-c/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-for-c/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-for-c/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-for-c/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-for-c/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-for-c/_cm.json) + +___ +### Script output +`cmr "install tensorflow lib lang-c " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-frameworks/install-tensorflow-from-src.md b/docs/AI-ML-frameworks/install-tensorflow-from-src.md new file mode 100644 index 0000000000..4421e0df64 --- /dev/null +++ b/docs/AI-ML-frameworks/install-tensorflow-from-src.md @@ -0,0 +1,165 @@ +Automatically generated README for this automation recipe: **install-tensorflow-from-src** + +Category: **AI/ML frameworks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-tensorflow-from-src,a974533c4c854597) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-from-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,install,tensorflow,lib,source,from-source,from-src,src,from.src* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get install tensorflow lib source from-source from-src src from.src" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,install,tensorflow,lib,source,from-source,from-src,src,from.src` + +`cm run script --tags=get,install,tensorflow,lib,source,from-source,from-src,src,from.src[,variations] ` + +*or* + +`cmr "get install tensorflow lib source from-source from-src src from.src"` + +`cmr "get install tensorflow lib source from-source from-src src from.src [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,install,tensorflow,lib,source,from-source,from-src,src,from.src',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,install,tensorflow,lib,source,from-source,from-src,src,from.src"``` + +#### Run this script via Docker (beta) + +`cm docker script "get install tensorflow lib source from-source from-src src from.src[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_tflite` + - Environment variables: + - *CM_TFLITE*: `on` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_GIT_URL: `https://github.com/tensorflow/tensorflow` +* CM_GIT_DEPTH: `1` +* CM_TFLITE: `off` + +
+ +#### Versions +Default version: `master` + +* `master` +* `v1.15.0` +* `v2.0.0` +* `v2.1.0` +* `v2.2.0` +* `v2.3.0` +* `v2.4.0` +* `v2.5.0` +* `v2.6.0` +* `v2.7.0` +* `v2.8.0` +* `v2.9.0` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-from-src/_cm.json)*** + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,generic-sys-util,_zlib + * Enable this dependency only if all ENV vars are set:
+`{'CM_HOST_OS_FLAVOR': ['ubuntu'], 'CM_HOST_OS_VERSION': ['18.04']}` + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic-python-lib,_package.numpy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-from-src/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-from-src/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-from-src/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-from-src/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-from-src/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-from-src/_cm.json) + +___ +### Script output +`cmr "get install tensorflow lib source from-source from-src src from.src [,variations]" -j` +#### New environment keys (filter) + +* `+CPLUS_INCLUDE_PATH` +* `+C_INCLUDE_PATH` +* `+DYLD_FALLBACK_LIBRARY_PATH` +* `+LD_LIBRARY_PATH` +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-frameworks/install-tflite-from-src.md b/docs/AI-ML-frameworks/install-tflite-from-src.md new file mode 100644 index 0000000000..aa40f96eb5 --- /dev/null +++ b/docs/AI-ML-frameworks/install-tflite-from-src.md @@ -0,0 +1,135 @@ +Automatically generated README for this automation recipe: **install-tflite-from-src** + +Category: **AI/ML frameworks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-tflite-from-src,5c72dab5eb88407c) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tflite-from-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,install,tflite-cmake,tensorflow-lite-cmake,from-src* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get install tflite-cmake tensorflow-lite-cmake from-src" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,install,tflite-cmake,tensorflow-lite-cmake,from-src` + +`cm run script --tags=get,install,tflite-cmake,tensorflow-lite-cmake,from-src ` + +*or* + +`cmr "get install tflite-cmake tensorflow-lite-cmake from-src"` + +`cmr "get install tflite-cmake tensorflow-lite-cmake from-src " ` + + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,install,tflite-cmake,tensorflow-lite-cmake,from-src',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,install,tflite-cmake,tensorflow-lite-cmake,from-src"``` + +#### Run this script via Docker (beta) + +`cm docker script "get install tflite-cmake tensorflow-lite-cmake from-src" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_GIT_DEPTH: `1` + +
+ +#### Versions +Default version: `master` + +* `master` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tflite-from-src/_cm.json)*** + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,compiler + * CM names: `--adr.['compiler']...` + - CM script: [get-cl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cl) + - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) + - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) + * get,cmake + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tflite-from-src/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tflite-from-src/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tflite-from-src/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tflite-from-src/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tflite-from-src/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tflite-from-src/_cm.json) + +___ +### Script output +`cmr "get install tflite-cmake tensorflow-lite-cmake from-src " -j` +#### New environment keys (filter) + +* `+CPLUS_INCLUDE_PATH` +* `+C_INCLUDE_PATH` +* `+DYLD_FALLBACK_LIBRARY_PATH` +* `+LD_LIBRARY_PATH` +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-models/convert-ml-model-huggingface-to-onnx.md b/docs/AI-ML-models/convert-ml-model-huggingface-to-onnx.md new file mode 100644 index 0000000000..4c409f9923 --- /dev/null +++ b/docs/AI-ML-models/convert-ml-model-huggingface-to-onnx.md @@ -0,0 +1,143 @@ +Automatically generated README for this automation recipe: **convert-ml-model-huggingface-to-onnx** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=convert-ml-model-huggingface-to-onnx,eacb01655d7e49ac) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-ml-model-huggingface-to-onnx)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *ml-model,model,huggingface-to-onnx,onnx,huggingface,convert* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "ml-model model huggingface-to-onnx onnx huggingface convert" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=ml-model,model,huggingface-to-onnx,onnx,huggingface,convert` + +`cm run script --tags=ml-model,model,huggingface-to-onnx,onnx,huggingface,convert[,variations] ` + +*or* + +`cmr "ml-model model huggingface-to-onnx onnx huggingface convert"` + +`cmr "ml-model model huggingface-to-onnx onnx huggingface convert [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'ml-model,model,huggingface-to-onnx,onnx,huggingface,convert',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="ml-model,model,huggingface-to-onnx,onnx,huggingface,convert"``` + +#### Run this script via Docker (beta) + +`cm docker script "ml-model model huggingface-to-onnx onnx huggingface convert[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_model-path.#` + - Environment variables: + - *CM_MODEL_HUGG_PATH*: `#` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-ml-model-huggingface-to-onnx/_cm.json)*** + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_transformers + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_onnxruntime + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-ml-model-huggingface-to-onnx/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-ml-model-huggingface-to-onnx/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-ml-model-huggingface-to-onnx/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-ml-model-huggingface-to-onnx/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-ml-model-huggingface-to-onnx/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-ml-model-huggingface-to-onnx/_cm.json) + +___ +### Script output +`cmr "ml-model model huggingface-to-onnx onnx huggingface convert [,variations]" -j` +#### New environment keys (filter) + +* `CM_ML_MODEL*` +* `CM_MODEL_HUGG_PATH` +* `HUGGINGFACE_ONNX_FILE_PATH` +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-models/get-bert-squad-vocab.md b/docs/AI-ML-models/get-bert-squad-vocab.md new file mode 100644 index 0000000000..3067bcb2e3 --- /dev/null +++ b/docs/AI-ML-models/get-bert-squad-vocab.md @@ -0,0 +1,119 @@ +Automatically generated README for this automation recipe: **get-bert-squad-vocab** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-bert-squad-vocab,2f99a545ce734157) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bert-squad-vocab)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,bert,squad,bert-large,bert-squad,vocab* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get bert squad bert-large bert-squad vocab" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,bert,squad,bert-large,bert-squad,vocab` + +`cm run script --tags=get,bert,squad,bert-large,bert-squad,vocab ` + +*or* + +`cmr "get bert squad bert-large bert-squad vocab"` + +`cmr "get bert squad bert-large bert-squad vocab " ` + + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,bert,squad,bert-large,bert-squad,vocab',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,bert,squad,bert-large,bert-squad,vocab"``` + +#### Run this script via Docker (beta) + +`cm docker script "get bert squad bert-large bert-squad vocab" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bert-squad-vocab/_cm.json) + 1. Run "preprocess" function from customize.py + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bert-squad-vocab/_cm.json)*** + * download,file + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bert-squad-vocab/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bert-squad-vocab/_cm.json) + +___ +### Script output +`cmr "get bert squad bert-large bert-squad vocab " -j` +#### New environment keys (filter) + +* `CM_ML_MODEL_BERT_VOCAB_FILE_WITH_PATH` +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-models/get-dlrm.md b/docs/AI-ML-models/get-dlrm.md new file mode 100644 index 0000000000..9bb81a69a3 --- /dev/null +++ b/docs/AI-ML-models/get-dlrm.md @@ -0,0 +1,143 @@ +Automatically generated README for this automation recipe: **get-dlrm** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dlrm,63680ac2449a4241) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dlrm)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,src,dlrm* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get src dlrm" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,src,dlrm` + +`cm run script --tags=get,src,dlrm[,variations] ` + +*or* + +`cmr "get src dlrm"` + +`cmr "get src dlrm [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,src,dlrm',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,src,dlrm"``` + +#### Run this script via Docker (beta) + +`cm docker script "get src dlrm[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_full-history` + - Environment variables: + - *CM_GIT_DEPTH*: `` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_GIT_DEPTH: `--depth 10` +* CM_GIT_PATCH: `no` +* CM_GIT_URL: `https://github.com/facebookresearch/dlrm.git` + +
+ +#### Versions +Default version: `main` + +* `main` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dlrm/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dlrm/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dlrm/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dlrm/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dlrm/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dlrm/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dlrm/_cm.json) + +___ +### Script output +`cmr "get src dlrm [,variations]" -j` +#### New environment keys (filter) + +* `DLRM_DIR` +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-models/get-ml-model-3d-unet-kits19.md b/docs/AI-ML-models/get-ml-model-3d-unet-kits19.md new file mode 100644 index 0000000000..1ae4ae5724 --- /dev/null +++ b/docs/AI-ML-models/get-ml-model-3d-unet-kits19.md @@ -0,0 +1,200 @@ +Automatically generated README for this automation recipe: **get-ml-model-3d-unet-kits19** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-3d-unet-kits19,fb7e31419c0f4226) ]* + 
+--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-3d-unet-kits19)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,ml-model,raw,3d-unet,kits19,medical-imaging* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get ml-model raw 3d-unet kits19 medical-imaging" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,ml-model,raw,3d-unet,kits19,medical-imaging` + +`cm run script --tags=get,ml-model,raw,3d-unet,kits19,medical-imaging[,variations] ` + +*or* + +`cmr "get ml-model raw 3d-unet kits19 medical-imaging"` + +`cmr "get ml-model raw 3d-unet kits19 medical-imaging [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,ml-model,raw,3d-unet,kits19,medical-imaging',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,ml-model,raw,3d-unet,kits19,medical-imaging"``` + +#### Run this script via Docker (beta) + +`cm docker script "get ml-model raw 3d-unet kits19 medical-imaging[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_onnx,fp32` + - Environment variables: + - *CM_ML_MODEL_ACCURACY*: `0.86170` + - *CM_PACKAGE_URL*: `https://zenodo.org/record/5597155/files/3dunet_kits19_128x128x128_dynbatch.onnx?download=1` + - Workflow: + * `_pytorch,fp32` + - Environment variables: + - *CM_ML_MODEL_ACCURACY*: `0.86170` + - *CM_PACKAGE_URL*: `https://zenodo.org/record/5597155/files/3dunet_kits19_pytorch.ptc?download=1` + - Workflow: + * `_pytorch,fp32,weights` + - Environment variables: + - *CM_ML_MODEL_ACCURACY*: `0.86170` + - *CM_ML_MODEL_FILE*: `retinanet_model_10.pth` + - *CM_PACKAGE_URL*: `https://zenodo.org/record/5597155/files/3dunet_kits19_pytorch_checkpoint.pth?download=1` + - *CM_UNZIP*: `yes` + - Workflow: + * `_tf,fp32` + - Environment variables: + - *CM_ML_MODEL_ACCURACY*: `0.86170` + - *CM_ML_MODEL_FILE*: `3dunet_kits19_128x128x128.tf` + - *CM_PACKAGE_URL*: `https://zenodo.org/record/5597155/files/3dunet_kits19_128x128x128.tf.zip?download=1` + - *CM_UNZIP*: `yes` + - Workflow: + * `_weights` + - Environment variables: + - *CM_MODEL_WEIGHTS_FILE*: `yes` + - Workflow: + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * **`_onnx`** (default) + - Environment variables: + - *CM_ML_MODEL_FRAMEWORK*: `onnx` + - Workflow: + * `_pytorch` + - Environment variables: + - *CM_ML_MODEL_FRAMEWORK*: `pytorch` + - Workflow: + * `_tf` + - Aliases: `_tensorflow` + - Environment variables: + - *CM_ML_MODEL_FRAMEWORK*: `tensorflow` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * **`_fp32`** (default) + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `fp32` + - *CM_ML_MODEL_PRECISION*: `fp32` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `fp32` + - Workflow: + +
+ + +#### Default variations + +`_fp32,_onnx` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-3d-unet-kits19/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-3d-unet-kits19/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-3d-unet-kits19/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-3d-unet-kits19/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-3d-unet-kits19/_cm.json) + +___ +### Script output +`cmr "get ml-model raw 3d-unet kits19 medical-imaging [,variations]" -j` +#### New environment keys (filter) + +* `CM_ML_MODEL_*` +#### New environment keys auto-detected from customize + +* `CM_ML_MODEL_FILE` +* `CM_ML_MODEL_FILE_WITH_PATH` +* `CM_ML_MODEL_PATH` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-bert-base-squad.md b/docs/AI-ML-models/get-ml-model-bert-base-squad.md new file mode 100644 index 0000000000..28bc15a1b8 --- /dev/null +++ b/docs/AI-ML-models/get-ml-model-bert-base-squad.md @@ -0,0 +1,183 @@ +Automatically generated README for this automation recipe: **get-ml-model-bert-base-squad** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-bert-base-squad,b3b10b452ce24c5f) ]* + +--- +#### Summary + +* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-base-squad)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,ml-model,raw,bert,bert-base,bert-squad,language,language-processing* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get ml-model raw bert bert-base bert-squad language language-processing" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,ml-model,raw,bert,bert-base,bert-squad,language,language-processing` + +`cm run script --tags=get,ml-model,raw,bert,bert-base,bert-squad,language,language-processing[,variations] ` + +*or* + +`cmr "get ml-model raw bert bert-base bert-squad language language-processing"` + +`cmr "get ml-model raw bert bert-base bert-squad language language-processing [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,ml-model,raw,bert,bert-base,bert-squad,language,language-processing',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,ml-model,raw,bert,bert-base,bert-squad,language,language-processing"``` + +#### Run this script via Docker (beta) + +`cm docker script "get ml-model raw bert bert-base bert-squad language language-processing[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_deepsparse,int8` + - Environment variables: + - *CM_ML_MODEL_F1*: `87.89` + - *CM_ML_MODEL_FILE*: `model.onnx` + - *CM_PRUNING_PERCENTAGE*: `95` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,ml-model,zoo,deepsparse,_pruned95_obs_quant-none + * CM names: `--adr.['neural-magic-zoo-downloader']...` + - *Warning: no scripts found* + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * `_deepsparse` + - Environment variables: + - *CM_ML_MODEL_FRAMEWORK*: `deepsparse` + - *CM_ML_MODEL_INPUT_IDS_NAME*: `input_ids` + - *CM_ML_MODEL_INPUT_MASK_NAME*: `input_mask` + - *CM_ML_MODEL_INPUT_SEGMENTS_NAME*: `segment_ids` + - *CM_ML_MODEL_OUTPUT_END_LOGITS_NAME*: `output_end_logits` + - *CM_ML_MODEL_OUTPUT_START_LOGITS_NAME*: `output_start_logits` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * **`_fp32`** (default) + - Environment variables: + - *CM_ML_MODEL_PRECISION*: `fp32` + - Workflow: + * `_int8` + - Environment variables: + - *CM_ML_MODEL_PRECISION*: `int8` + - *CM_ML_MODEL_QUANTIZED*: `yes` + - Workflow: + +
+ + +#### Default variations + +`_fp32` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-base-squad/_cm.json) + 1. Run "preprocess" function from customize.py + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-base-squad/_cm.json)*** + * download-and-extract + * Enable this dependency only if all ENV vars are set:
+`{'CM_TMP_ML_MODEL_REQUIRE_DOWNLOAD': 'yes'}` + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-base-squad/_cm.json) + 1. Run "postrocess" function from customize.py + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-base-squad/_cm.json)*** + * get,bert,squad,vocab + - CM script: [get-bert-squad-vocab](https://github.com/mlcommons/cm4mlops/tree/master/script/get-bert-squad-vocab) + +___ +### Script output +`cmr "get ml-model raw bert bert-base bert-squad language language-processing [,variations]" -j` +#### New environment keys (filter) + +* `CM_ML_MODEL*` +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-models/get-ml-model-bert-large-squad.md b/docs/AI-ML-models/get-ml-model-bert-large-squad.md new file mode 100644 index 0000000000..df467b7a4e --- /dev/null +++ b/docs/AI-ML-models/get-ml-model-bert-large-squad.md @@ -0,0 +1,357 @@ +Automatically generated README for this automation recipe: **get-ml-model-bert-large-squad** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-bert-large-squad,5e865dbdc65949d2) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-large-squad)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse 
this script (see in above meta description): *get,ml-model,raw,bert,bert-large,bert-squad,language,language-processing* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get ml-model raw bert bert-large bert-squad language language-processing" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,ml-model,raw,bert,bert-large,bert-squad,language,language-processing` + +`cm run script --tags=get,ml-model,raw,bert,bert-large,bert-squad,language,language-processing[,variations] ` + +*or* + +`cmr "get ml-model raw bert bert-large bert-squad language language-processing"` + +`cmr "get ml-model raw bert bert-large bert-squad language language-processing [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,ml-model,raw,bert,bert-large,bert-squad,language,language-processing',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,ml-model,raw,bert,bert-large,bert-squad,language,language-processing"``` + +#### Run this script via Docker (beta) + +`cm docker script "get ml-model raw bert bert-large bert-squad language language-processing[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_deepsparse,int8` + - Environment variables: + - *CM_ML_MODEL_F1*: `90.21282641816266` + - *CM_ML_MODEL_FILE*: `oBERT-Large_95sparse_block4_qat.onnx` + - *CM_DAE_EXTRACT_DOWNLOADED*: `yes` + - Workflow: + * `_deepsparse,int8,github` + - Environment variables: + - *CM_PACKAGE_URL*: `https://github.com/mlcommons/inference_results_v2.1/raw/master/open/NeuralMagic/code/bert/deepsparse/models/oBERT-Large_95sparse_block4_qat.onnx.tar.xz` + - Workflow: + * `_onnx,fp32` + - Environment variables: + - *CM_ML_MODEL_F1*: `90.874` + - Workflow: + * `_onnx,fp32,armi` + - Environment variables: + - *CM_PACKAGE_URL*: `https://armi.in/files/model.onnx` + - *CM_PACKAGE_URL1*: `https://zenodo.org/record/3733910/files/model.onnx` + - Workflow: + * `_onnx,fp32,zenodo` + - Environment variables: + - *CM_PACKAGE_URL*: `https://zenodo.org/record/3733910/files/model.onnx` + - Workflow: + * `_onnx,int8` + - Environment variables: + - *CM_ML_MODEL_F1*: `90.067` + - *CM_PACKAGE_URL*: `https://zenodo.org/record/3750364/files/bert_large_v1_1_fake_quant.onnx` + - Workflow: + * `_onnx,int8,amazon-s3` + - Environment variables: + - *CM_PACKAGE_URL*: `https://mlperf-public.s3.us-west-2.amazonaws.com/bert_large_v1_1_fake_quant.onnx` + - Workflow: + * `_onnx,int8,zenodo` + - Environment variables: + - *CM_PACKAGE_URL*: `https://zenodo.org/record/3750364/files/bert_large_v1_1_fake_quant.onnx` + - Workflow: + * `_onnxruntime` + - Workflow: + * `_pytorch,fp32` + - Environment variables: + - *CM_ML_MODEL_F1*: `90.874` + - *CM_DOWNLOAD_CHECKSUM*: `00fbcbfaebfa20d87ac9885120a6e9b4` + - Workflow: + * `_pytorch,fp32,armi` + - Environment variables: + - *CM_PACKAGE_URL*: `https://armi.in/files/fp32/model.pytorch` + - *CM_PACKAGE_URL1*: `https://zenodo.org/record/3733896/files/model.pytorch` + - Workflow: + * `_pytorch,fp32,zenodo` + - Environment variables: + - *CM_PACKAGE_URL*: `https://zenodo.org/record/3733896/files/model.pytorch` + - Workflow: + * 
`_pytorch,int8` + - Environment variables: + - *CM_ML_MODEL_F1*: `90.633` + - Workflow: + * `_pytorch,int8,armi` + - Environment variables: + - *CM_PACKAGE_URL*: `https://armi.in/files/int8/pytorch_model.bin` + - *CM_PACKAGE_URL1*: `https://zenodo.org/record/4792496/files/pytorch_model.bin` + - Workflow: + * `_pytorch,int8,zenodo` + - Environment variables: + - *CM_PACKAGE_URL*: `https://zenodo.org/record/4792496/files/pytorch_model.bin` + - Workflow: + * `_tensorflow` + - Workflow: + * `_tf,fp32` + - Environment variables: + - *CM_ML_MODEL_F1*: `90.874` + - Workflow: + * `_tf,fp32,zenodo` + - Environment variables: + - *CM_PACKAGE_URL*: `https://zenodo.org/record/3939747/files/model.pb` + - Workflow: + +
+ + + * Group "**download-source**" +
+ Click here to expand this section. + + * `_amazon-s3` + - Workflow: + * `_armi` + - Workflow: + * `_custom-url.#` + - Environment variables: + - *CM_PACKAGE_URL*: `#` + - Workflow: + * `_github` + - Workflow: + * `_zenodo` + - Workflow: + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * `_deepsparse` + - Environment variables: + - *CM_ML_MODEL_FRAMEWORK*: `deepsparse` + - *CM_ML_MODEL_INPUT_IDS_NAME*: `input_ids` + - *CM_ML_MODEL_INPUT_MASK_NAME*: `input_mask` + - *CM_ML_MODEL_INPUT_SEGMENTS_NAME*: `segment_ids` + - *CM_ML_MODEL_OUTPUT_END_LOGITS_NAME*: `output_end_logits` + - *CM_ML_MODEL_OUTPUT_START_LOGITS_NAME*: `output_start_logits` + - Workflow: + * **`_onnx`** (default) + - Environment variables: + - *CM_ML_MODEL_FRAMEWORK*: `onnx` + - *CM_ML_MODEL_INPUT_IDS_NAME*: `input_ids` + - *CM_ML_MODEL_INPUT_MASK_NAME*: `input_mask` + - *CM_ML_MODEL_INPUT_SEGMENTS_NAME*: `segment_ids` + - *CM_ML_MODEL_OUTPUT_END_LOGITS_NAME*: `output_end_logits` + - *CM_ML_MODEL_OUTPUT_START_LOGITS_NAME*: `output_start_logits` + - Workflow: + * `_pytorch` + - Environment variables: + - *CM_ML_MODEL_FRAMEWORK*: `pytorch` + - *CM_ML_MODEL_INPUT_IDS_NAME*: `input_ids` + - *CM_ML_MODEL_INPUT_MASK_NAME*: `input_mask` + - *CM_ML_MODEL_INPUT_SEGMENTS_NAME*: `segment_ids` + - *CM_ML_MODEL_OUTPUT_END_LOGITS_NAME*: `output_end_logits` + - *CM_ML_MODEL_OUTPUT_START_LOGITS_NAME*: `output_start_logits` + - Workflow: + * `_tf` + - Environment variables: + - *CM_ML_MODEL_FRAMEWORK*: `tf` + - *CM_ML_MODEL_INPUT_IDS_NAME*: `input_ids` + - *CM_ML_MODEL_INPUT_MASK_NAME*: `input_mask` + - *CM_ML_MODEL_INPUT_SEGMENTS_NAME*: `segment_ids` + - *CM_ML_MODEL_OUTPUT_END_LOGITS_NAME*: `output_end_logits` + - *CM_ML_MODEL_OUTPUT_START_LOGITS_NAME*: `output_start_logits` + - Workflow: + +
+ + + * Group "**packing**" +
+ Click here to expand this section. + + * `_packed` + - Environment variables: + - *CM_ML_MODEL_BERT_PACKED*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_torch + * CM names: `--adr.['torch', 'pytorch']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.tensorflow + * CM names: `--adr.['tensorflow']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.transformers + * CM names: `--adr.['transformers']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.protobuf + * CM names: `--adr.['protobuf']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.onnx + * CM names: `--adr.['onnx']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_onnx-graphsurgeon + * CM names: `--adr.['onnx-graphsurgeon']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_numpy + * CM names: `--adr.['numpy']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,mlperf,inference,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + 1. 
***Read "prehook_deps" on other CM scripts*** + * download,file,_wget,_url.https://zenodo.org/record/3733868/files/model.ckpt-5474.data-00000-of-00001 + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + * download,file,_wget,_url.https://zenodo.org/record/3733868/files/model.ckpt-5474.index + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + * download,file,_wget,_url.https://zenodo.org/record/3733868/files/model.ckpt-5474.meta + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + * download,file,_wget,_url.https://zenodo.org/record/3733868/files/vocab.txt + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + * download,file,_wget,_url.https://raw.githubusercontent.com/krai/axs2kilt/main/model_onnx_bert_large_packed_recipe/convert_model.py + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + * **`_unpacked`** (default) + - Environment variables: + - *CM_ML_MODEL_BERT_PACKED*: `no` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * **`_fp32`** (default) + - Environment variables: + - *CM_ML_MODEL_PRECISION*: `fp32` + - Workflow: + * `_int8` + - Environment variables: + - *CM_ML_MODEL_PRECISION*: `int8` + - *CM_ML_MODEL_QUANTIZED*: `yes` + - Workflow: + +
+ + +#### Default variations + +`_fp32,_onnx,_unpacked` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+</details>
+
+___
+### Dependencies on other CM scripts
+
+
+  1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-large-squad/_cm.json)
+  1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-large-squad/customize.py)***
+  1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-large-squad/_cm.json)***
+     * download-and-extract
+       * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_ML_MODEL_BERT_PACKED': ['yes']}` + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + 1. ***Run native script if exists*** + * [run-packed.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-large-squad/run-packed.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-large-squad/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-large-squad/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-large-squad/_cm.json)*** + * get,dataset-aux,squad-vocab + - CM script: [get-dataset-squad-vocab](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad-vocab) + +___ +### Script output +`cmr "get ml-model raw bert bert-large bert-squad language language-processing [,variations]" -j` +#### New environment keys (filter) + +* `CM_ML_MODEL*` +#### New environment keys auto-detected from customize + +* `CM_ML_MODEL_BERT_LARGE_FP32_PATH` +* `CM_ML_MODEL_BERT_LARGE_INT8_PATH` +* `CM_ML_MODEL_BERT_PACKED_PATH` +* `CM_ML_MODEL_FILE` +* `CM_ML_MODEL_FILE_WITH_PATH` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-dlrm-terabyte.md b/docs/AI-ML-models/get-ml-model-dlrm-terabyte.md new file mode 100644 index 0000000000..cc5c0328a8 --- /dev/null +++ b/docs/AI-ML-models/get-ml-model-dlrm-terabyte.md @@ -0,0 +1,262 @@ +Automatically generated README for this automation recipe: **get-ml-model-dlrm-terabyte** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-dlrm-terabyte,8fa7582c603a4db3) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-dlrm-terabyte)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,ml-model,dlrm,raw,terabyte,criteo-terabyte,criteo,recommendation* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get ml-model dlrm raw terabyte criteo-terabyte criteo recommendation" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,ml-model,dlrm,raw,terabyte,criteo-terabyte,criteo,recommendation` + +`cm run script --tags=get,ml-model,dlrm,raw,terabyte,criteo-terabyte,criteo,recommendation[,variations] [--input_flags]` + +*or* + +`cmr "get ml-model dlrm raw terabyte criteo-terabyte criteo recommendation"` + +`cmr "get ml-model dlrm raw terabyte criteo-terabyte criteo recommendation [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,ml-model,dlrm,raw,terabyte,criteo-terabyte,criteo,recommendation',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,ml-model,dlrm,raw,terabyte,criteo-terabyte,criteo,recommendation"``` + +#### Run this script via Docker (beta) + +`cm docker script "get ml-model dlrm raw terabyte criteo-terabyte criteo recommendation[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_debug` + - Environment variables: + - *CM_ML_MODEL_DEBUG*: `yes` + - Workflow: + * `_onnx,fp32` + - Environment variables: + - *CM_ML_MODEL_ACCURACY*: `0.8025` + - *CM_PACKAGE_URL*: `https://dlrm.s3-us-west-1.amazonaws.com/models/tb00_40M.onnx.tar` + - *CM_UNTAR*: `yes` + - *CM_ML_MODEL_FILE*: `tb00_40M.onnx` + - *CM_ML_MODEL_DLRM_MAX_INDEX_RANGE*: `40000000` + - Workflow: + * `_onnx,fp32,debug` + - Environment variables: + - *CM_ML_MODEL_ACCURACY*: `0.8107` + - *CM_PACKAGE_URL*: `https://dlrm.s3-us-west-1.amazonaws.com/models/tb0875_10M.onnx.tar` + - *CM_ML_MODEL_DLRM_MAX_INDEX_RANGE*: `10000000` + - *CM_UNTAR*: `yes` + - *CM_ML_MODEL_FILE*: `tb0875_10M.onnx` + - Workflow: + * `_pytorch,fp32` + - Environment variables: + - *CM_ML_MODEL_ACCURACY*: `0.8025` + - *CM_PACKAGE_URL*: `https://dlrm.s3-us-west-1.amazonaws.com/models/tb00_40M.pt` + - *CM_ML_MODEL_DLRM_MAX_INDEX_RANGE*: `40000000` + - *CM_DOWNLOAD_CHECKSUM*: `2d49a5288cddb37c3c64860a06d79bb9` + - Workflow: + * `_pytorch,fp32,debug` + - Environment variables: + - *CM_ML_MODEL_ACCURACY*: `0.8107` + - *CM_PACKAGE_URL*: `https://dlrm.s3-us-west-1.amazonaws.com/models/tb0875_10M.pt` + - *CM_ML_MODEL_DLRM_MAX_INDEX_RANGE*: `10000000` + - Workflow: + * `_pytorch,fp32,weight_sharded` + - Environment variables: + - *CM_ML_MODEL_ACCURACY*: `0.8025` + - *CM_ML_MODEL_DLRM_MAX_INDEX_RANGE*: `40000000` + - *CM_ML_MODEL_FILE*: `model_weights` + - *CM_TMP_MODEL_ADDITIONAL_NAME*: `` + - *CM_DOWNLOAD_CHECKSUM*: `` + - Workflow: + * `_pytorch,fp32,weight_sharded,rclone` + - Environment variables: + - *CM_RCLONE_CONFIG_CMD*: `rclone config create mlc-inference s3 provider=Cloudflare access_key_id=f65ba5eef400db161ea49967de89f47b secret_access_key=fbea333914c292b854f14d3fe232bad6c5407bf0ab1bebf78833c2b359bdfd2b endpoint=https://c2686074cb2caf5cbaf6d134bdba8b47.r2.cloudflarestorage.com` + - *CM_PACKAGE_URL*: `mlc-inference:mlcommons-inference-wg-public/model_weights` + - Workflow: + * 
`_pytorch,fp32,weight_sharded,wget` + - Environment variables: + - *CM_PACKAGE_URL*: `https://cloud.mlcommons.org/index.php/s/XzfSeLgW8FYfR3S/download` + - *CM_DAE_EXTRACT_DOWNLOADED*: `yes` + - *CM_DOWNLOAD_FILENAME*: `download` + - *CM_EXTRACT_UNZIP*: `yes` + - Workflow: + +
+ + + * Group "**download-tool**" +
+ Click here to expand this section. + + * `_rclone` + - Workflow: + * `_wget` + - Workflow: + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * `_onnx` + - Environment variables: + - *CM_ML_MODEL_FRAMEWORK*: `onnx` + - Workflow: + * **`_pytorch`** (default) + - Environment variables: + - *CM_ML_MODEL_FRAMEWORK*: `pytorch` + - *CM_TMP_MODEL_ADDITIONAL_NAME*: `dlrm_terabyte.pytorch` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * **`_fp32`** (default) + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `fp32` + - *CM_ML_MODEL_PRECISION*: `fp32` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `fp32` + - Workflow: + +
+ + + * Group "**type**" +
+ Click here to expand this section. + + * **`_weight_sharded`** (default) + - Environment variables: + - *CM_DLRM_MULTIHOT_MODEL*: `yes` + - Workflow: + +
+ + +#### Default variations + +`_fp32,_pytorch,_weight_sharded` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--dir=value` → `CM_DOWNLOAD_PATH=value` +* `--download_path=value` → `CM_DOWNLOAD_PATH=value` +* `--to=value` → `CM_DOWNLOAD_PATH=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "dir":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-dlrm-terabyte/_cm.json) + 1. Run "preprocess" function from customize.py + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-dlrm-terabyte/_cm.json)*** + * download-and-extract + * CM names: `--adr.['dae']...` + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-dlrm-terabyte/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-dlrm-terabyte/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-dlrm-terabyte/_cm.json) + +___ +### Script output +`cmr "get ml-model dlrm raw terabyte criteo-terabyte criteo recommendation [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_ML_MODEL_*` +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-models/get-ml-model-efficientnet-lite.md b/docs/AI-ML-models/get-ml-model-efficientnet-lite.md new file mode 100644 index 0000000000..c819766667 --- /dev/null +++ b/docs/AI-ML-models/get-ml-model-efficientnet-lite.md @@ -0,0 +1,248 @@ +Automatically generated README for this automation recipe: **get-ml-model-efficientnet-lite** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-efficientnet-lite,1041f681977d4b7c) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-efficientnet-lite)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,ml-model,efficientnet,raw,ml-model-efficientnet,ml-model-efficientnet-lite,lite,tflite,image-classification* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get ml-model efficientnet raw ml-model-efficientnet ml-model-efficientnet-lite lite tflite image-classification" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,ml-model,efficientnet,raw,ml-model-efficientnet,ml-model-efficientnet-lite,lite,tflite,image-classification` + +`cm run script --tags=get,ml-model,efficientnet,raw,ml-model-efficientnet,ml-model-efficientnet-lite,lite,tflite,image-classification[,variations] ` + +*or* + +`cmr "get ml-model efficientnet raw ml-model-efficientnet ml-model-efficientnet-lite lite tflite image-classification"` + +`cmr "get ml-model efficientnet raw ml-model-efficientnet ml-model-efficientnet-lite lite tflite image-classification [variations]" ` + + +* *See the list of 
`variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,ml-model,efficientnet,raw,ml-model-efficientnet,ml-model-efficientnet-lite,lite,tflite,image-classification',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,ml-model,efficientnet,raw,ml-model-efficientnet,ml-model-efficientnet-lite,lite,tflite,image-classification"``` + +#### Run this script via Docker (beta) + +`cm docker script "get ml-model efficientnet raw ml-model-efficientnet ml-model-efficientnet-lite lite tflite image-classification[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_tflite` + - Workflow: + +
+ + + * Group "**kind**" +
+ Click here to expand this section. + + * **`_lite0`** (default) + - Environment variables: + - *CM_ML_MODEL_EFFICIENTNET_LITE_KIND*: `lite0` + - Workflow: + * `_lite1` + - Environment variables: + - *CM_ML_MODEL_EFFICIENTNET_LITE_KIND*: `lite1` + - Workflow: + * `_lite2` + - Environment variables: + - *CM_ML_MODEL_EFFICIENTNET_LITE_KIND*: `lite2` + - Workflow: + * `_lite3` + - Environment variables: + - *CM_ML_MODEL_EFFICIENTNET_LITE_KIND*: `lite3` + - Workflow: + * `_lite4` + - Environment variables: + - *CM_ML_MODEL_EFFICIENTNET_LITE_KIND*: `lite4` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * **`_fp32`** (default) + - Environment variables: + - *CM_ML_MODEL_EFFICIENTNET_LITE_PRECISION*: `fp32` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_PRECISION*: `fp32` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp32` + - Workflow: + * `_uint8` + - Aliases: `_int8` + - Environment variables: + - *CM_ML_MODEL_EFFICIENTNET_LITE_PRECISION*: `int8` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `uint8` + - *CM_ML_MODEL_PRECISION*: `uint8` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `uint8` + - Workflow: + +
+ + + * Group "**resolution**" +
+ Click here to expand this section. + + * **`_resolution-224`** (default) + - Environment variables: + - *CM_ML_MODEL_IMAGE_HEIGHT*: `224` + - *CM_ML_MODEL_IMAGE_WIDTH*: `224` + - *CM_ML_MODEL_MOBILENET_RESOLUTION*: `224` + - *CM_DATASET_PREPROCESSED_IMAGENET_DEP_TAGS*: `_resolution.224` + - Workflow: + * `_resolution-240` + - Environment variables: + - *CM_ML_MODEL_IMAGE_HEIGHT*: `240` + - *CM_ML_MODEL_IMAGE_WIDTH*: `240` + - *CM_ML_MODEL_MOBILENET_RESOLUTION*: `240` + - *CM_DATASET_PREPROCESSED_IMAGENET_DEP_TAGS*: `_resolution.240` + - Workflow: + * `_resolution-260` + - Environment variables: + - *CM_ML_MODEL_IMAGE_HEIGHT*: `260` + - *CM_ML_MODEL_IMAGE_WIDTH*: `260` + - *CM_ML_MODEL_MOBILENET_RESOLUTION*: `260` + - *CM_DATASET_PREPROCESSED_IMAGENET_DEP_TAGS*: `_resolution.260` + - Workflow: + * `_resolution-280` + - Environment variables: + - *CM_ML_MODEL_IMAGE_HEIGHT*: `280` + - *CM_ML_MODEL_IMAGE_WIDTH*: `280` + - *CM_ML_MODEL_MOBILENET_RESOLUTION*: `280` + - *CM_DATASET_PREPROCESSED_IMAGENET_DEP_TAGS*: `_resolution.280` + - Workflow: + * `_resolution-300` + - Environment variables: + - *CM_ML_MODEL_IMAGE_HEIGHT*: `300` + - *CM_ML_MODEL_IMAGE_WIDTH*: `300` + - *CM_ML_MODEL_MOBILENET_RESOLUTION*: `300` + - *CM_DATASET_PREPROCESSED_IMAGENET_DEP_TAGS*: `_resolution.300` + - Workflow: + +
+ + +#### Default variations + +`_fp32,_lite0,_resolution-224` + +#### Valid variation combinations checked by the community + + + +* `_lite0,_resolution-224` +* `_lite1,_resolution-240` +* `_lite2,_resolution-260` +* `_lite3,_resolution-280` +* `_lite4,_resolution-300` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_ML_MODEL_INPUTS_DATA_TYPE: `fp32` +* CM_ML_MODEL_PRECISION: `fp32` +* CM_ML_MODEL_WEIGHTS_DATA_TYPE: `fp32` + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-efficientnet-lite/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-efficientnet-lite/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-efficientnet-lite/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-efficientnet-lite/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-efficientnet-lite/_cm.json) + +___ +### Script output +`cmr "get ml-model efficientnet raw ml-model-efficientnet ml-model-efficientnet-lite lite tflite image-classification [,variations]" -j` +#### New environment keys (filter) + +* `CM_DATASET_PREPROCESSED_IMAGENET_DEP_TAGS` +* `CM_ML_MODEL_*` +#### New environment keys auto-detected from customize + +* `CM_ML_MODEL_FILE` +* `CM_ML_MODEL_FILE_WITH_PATH` +* `CM_ML_MODEL_PATH` +* `CM_ML_MODEL_STARTING_WEIGHTS_FILENAME` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-gptj.md b/docs/AI-ML-models/get-ml-model-gptj.md new file mode 100644 index 0000000000..5231048a74 --- /dev/null +++ b/docs/AI-ML-models/get-ml-model-gptj.md @@ -0,0 +1,321 @@ +Automatically generated README for this automation recipe: **get-ml-model-gptj** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-gptj,a41166210f294fbf) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-gptj)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,raw,ml-model,gptj,gpt-j,large-language-model* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get raw ml-model gptj gpt-j large-language-model" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,raw,ml-model,gptj,gpt-j,large-language-model` + +`cm run script --tags=get,raw,ml-model,gptj,gpt-j,large-language-model[,variations] [--input_flags]` + +*or* + +`cmr "get raw ml-model gptj gpt-j large-language-model"` + +`cmr "get raw ml-model gptj gpt-j large-language-model [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,raw,ml-model,gptj,gpt-j,large-language-model',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,raw,ml-model,gptj,gpt-j,large-language-model"``` + +#### Run this script via Docker (beta) + +`cm docker script "get raw ml-model gptj gpt-j large-language-model[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_batch_size.#` + - Environment variables: + - *CM_ML_MODEL_BATCH_SIZE*: `#` + - Workflow: + * `_pytorch,fp32` + - Environment variables: + - *CM_DOWNLOAD_EXTRA_OPTIONS*: ` --output-document checkpoint.zip` + - *CM_UNZIP*: `yes` + - *CM_DOWNLOAD_CHECKSUM_NOT_USED*: `e677e28aaf03da84584bb3073b7ee315` + - *CM_PACKAGE_URL*: `https://cloud.mlcommons.org/index.php/s/QAZ2oM94MkFtbQx/download` + - *CM_RCLONE_CONFIG_CMD*: `rclone config create mlc-inference s3 provider=Cloudflare access_key_id=f65ba5eef400db161ea49967de89f47b secret_access_key=fbea333914c292b854f14d3fe232bad6c5407bf0ab1bebf78833c2b359bdfd2b endpoint=https://c2686074cb2caf5cbaf6d134bdba8b47.r2.cloudflarestorage.com` + - *CM_RCLONE_URL*: `mlc-inference:mlcommons-inference-wg-public/gpt-j` + - Workflow: + * `_pytorch,fp32,wget` + - Workflow: + * `_pytorch,int4,intel` + - Workflow: + * `_pytorch,int8,intel` + - Workflow: + * `_pytorch,intel` + - Environment variables: + - *CM_GPTJ_INTEL_MODEL*: `yes` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,mlperf,inference,results + - CM script: [get-mlperf-inference-results](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-results) + - CM script: [get-mlperf-inference-results-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-results-dir) + * get,ml-model,gpt-j,_fp32,_pytorch + - CM script: [get-ml-model-gptj](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-gptj) + * get,conda,_name.gptj-pt + - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) + * get,python,_conda.gptj-pt + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic,conda-package,_package.intel-openmp,_source.intel + * CM names: `--adr.['conda-package', 'intel-openmp']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.jemalloc,_source.conda-forge + * CM names: `--adr.['conda-package', 'jemalloc']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * install,ipex,from.src,_for-intel-mlperf-inference-v3.1-gptj + - CM script: [install-ipex-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-ipex-from-src) + * get,dataset,cnndm,_calibration + - CM script: [get-dataset-cnndm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-cnndm) + * `_saxml,fp32` + - Environment variables: + - *CM_TMP_MODEL_SAXML*: `fp32` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,ml-model,gptj,_pytorch,_fp32 + - CM script: [get-ml-model-gptj](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-gptj) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_package.jax[cpu] + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.paxml + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.praxis + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.transformers + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.accelerate + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_saxml,int8` + - Environment variables: + - *CM_TMP_MODEL_SAXML*: `int8` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,ml-model,gptj,_saxml,_fp32 + - CM script: [get-ml-model-gptj](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-gptj) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_package.praxis + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.apache-beam + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,git,repo,_repo.https://github.com/google/saxml + * CM names: `--adr.['saxml']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + +
+ + + * Group "**download-tool**" +
+ Click here to expand this section. + + * **`_rclone`** (default) + - Environment variables: + - *CM_DOWNLOAD_FILENAME*: `checkpoint` + - *CM_DOWNLOAD_URL*: `<<>>` + - Workflow: + * `_wget` + - Environment variables: + - *CM_DOWNLOAD_URL*: `<<>>` + - *CM_DOWNLOAD_FILENAME*: `checkpoint.zip` + - Workflow: + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * **`_pytorch`** (default) + - Environment variables: + - *CM_ML_MODEL_DATA_LAYOUT*: `NCHW` + - *CM_ML_MODEL_FRAMEWORK*: `pytorch` + - *CM_ML_STARTING_WEIGHTS_FILENAME*: `<<>>` + - Workflow: + * `_saxml` + - Workflow: + +
+ + + * Group "**model-provider**" +
+ Click here to expand this section. + + * `_intel` + - Workflow: + * **`_mlcommons`** (default) + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * `_fp32` + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `fp32` + - *CM_ML_MODEL_PRECISION*: `fp32` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `fp32` + - Workflow: + * `_int4` + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `int4` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `int4` + - Workflow: + * `_int8` + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `int8` + - *CM_ML_MODEL_PRECISION*: `int8` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `int8` + - Workflow: + * `_uint8` + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `uint8` + - *CM_ML_MODEL_PRECISION*: `uint8` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `uint8` + - Workflow: + +
+ + +#### Default variations + +`_mlcommons,_pytorch,_rclone` + +#### Script flags mapped to environment +
+Click here to expand this section.
+
+* `--checkpoint=value` → `GPTJ_CHECKPOINT_PATH=value`
+* `--download_path=value` → `CM_DOWNLOAD_PATH=value`
+* `--to=value` → `CM_DOWNLOAD_PATH=value`
+
+**Above CLI flags can be used in the Python CM API as follows:**
+
+```python
+r=cm.access({... , "checkpoint":...})
+```
+
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-gptj/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-gptj/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-gptj/_cm.json)*** + * download-and-extract + * Enable this dependency only if all ENV vars are set:
+`{'CM_TMP_REQUIRE_DOWNLOAD': ['yes']}` + * CM names: `--adr.['dae']...` + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + 1. ***Run native script if exists*** + * [run-int4-calibration.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-gptj/run-int4-calibration.sh) + * [run-intel.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-gptj/run-intel.sh) + * [run-saxml-quantized.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-gptj/run-saxml-quantized.sh) + * [run-saxml.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-gptj/run-saxml.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-gptj/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-gptj/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-gptj/_cm.json) + +___ +### Script output +`cmr "get raw ml-model gptj gpt-j large-language-model [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_ML_MODEL_*` +* `GPTJ_CHECKPOINT_PATH` +#### New environment keys auto-detected from customize + +* `CM_ML_MODEL_FILE` +* `CM_ML_MODEL_FILE_WITH_PATH` +* `CM_ML_MODEL_WEIGHT_DATA_TYPES` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-huggingface-zoo.md b/docs/AI-ML-models/get-ml-model-huggingface-zoo.md new file mode 100644 index 0000000000..7e5d18f566 --- /dev/null +++ b/docs/AI-ML-models/get-ml-model-huggingface-zoo.md @@ -0,0 +1,192 @@ +Automatically generated README for this automation recipe: **get-ml-model-huggingface-zoo** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and 
Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-huggingface-zoo,53cf8252a443446a) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-huggingface-zoo)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,ml-model,huggingface,zoo* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get ml-model huggingface zoo" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,ml-model,huggingface,zoo` + +`cm run script --tags=get,ml-model,huggingface,zoo[,variations] [--input_flags]` + +*or* + +`cmr "get ml-model huggingface zoo"` + +`cmr "get ml-model huggingface zoo [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,ml-model,huggingface,zoo',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,ml-model,huggingface,zoo"``` + +#### Run this script via Docker (beta) + +`cm docker script "get ml-model huggingface zoo[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_model-stub.#` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `#` + - Workflow: + * `_onnx-subfolder` + - Environment variables: + - *CM_HF_SUBFOLDER*: `onnx` + - Workflow: + * `_pierreguillou_bert_base_cased_squad_v1.1_portuguese` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `pierreguillou/bert-base-cased-squad-v1.1-portuguese` + - Workflow: + * `_prune` + - Environment variables: + - *CM_MODEL_TASK*: `prune` + - Workflow: + +
+ + + * Group "**download-type**" +
+ Click here to expand this section. + + * `_clone-repo` + - Environment variables: + - *CM_GIT_CLONE_REPO*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,git,repo,_lfs + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section.
+
+* `--download_path=value` → `CM_DOWNLOAD_PATH=value`
+* `--env_key=value` → `CM_MODEL_ZOO_ENV_KEY=value`
+* `--full_subfolder=value` → `CM_HF_FULL_SUBFOLDER=value`
+* `--model_filename=value` → `CM_MODEL_ZOO_FILENAME=value`
+* `--revision=value` → `CM_HF_REVISION=value`
+* `--subfolder=value` → `CM_HF_SUBFOLDER=value`
+
+**Above CLI flags can be used in the Python CM API as follows:**
+
+```python
+r=cm.access({... , "download_path":...})
+```
+
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-huggingface-zoo/_cm.json)*** + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_huggingface_hub + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-huggingface-zoo/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-huggingface-zoo/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-huggingface-zoo/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-huggingface-zoo/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-huggingface-zoo/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-huggingface-zoo/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-huggingface-zoo/_cm.json) + +___ +### Script output +`cmr "get ml-model huggingface zoo [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_ML_MODEL*` +* `CM_MODEL_ZOO_STUB` +#### New environment keys auto-detected from customize + +* `CM_ML_MODEL_'+env_key+'_FILE_WITH_PATH` +* `CM_ML_MODEL_'+env_key+'_PATH` +* `CM_ML_MODEL_PATH` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-llama2.md b/docs/AI-ML-models/get-ml-model-llama2.md new file mode 100644 index 0000000000..75957bee94 --- /dev/null +++ b/docs/AI-ML-models/get-ml-model-llama2.md @@ -0,0 +1,222 @@ +Automatically generated README for this automation recipe: **get-ml-model-llama2** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-llama2,5db97be9f61244c6) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-llama2)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,raw,ml-model,language-processing,llama2,llama2-70b,text-summarization* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get raw ml-model language-processing llama2 llama2-70b text-summarization" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,raw,ml-model,language-processing,llama2,llama2-70b,text-summarization` + +`cm run script --tags=get,raw,ml-model,language-processing,llama2,llama2-70b,text-summarization[,variations] [--input_flags]` + +*or* + +`cmr "get raw ml-model language-processing llama2 llama2-70b text-summarization"` + +`cmr "get raw ml-model language-processing llama2 llama2-70b text-summarization [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,raw,ml-model,language-processing,llama2,llama2-70b,text-summarization',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,raw,ml-model,language-processing,llama2,llama2-70b,text-summarization"``` + +#### Run this script via Docker (beta) + +`cm docker script "get raw ml-model language-processing llama2 llama2-70b text-summarization[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_batch_size.#` + - Environment variables: + - *CM_ML_MODEL_BATCH_SIZE*: `#` + - Workflow: + * `_pytorch,fp32` + - Workflow: + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * **`_pytorch`** (default) + - Environment variables: + - *CM_ML_MODEL_FRAMEWORK*: `pytorch` + - Workflow: + +
+ + + * Group "**huggingface-stub**" +
+ Click here to expand this section. + + * **`_meta-llama/Llama-2-70b-chat-hf`** (default) + - Environment variables: + - *CM_GIT_CHECKOUT_FOLDER*: `Llama-2-70b-chat-hf` + - *CM_MODEL_ZOO_ENV_KEY*: `LLAMA2` + - Workflow: + * `_meta-llama/Llama-2-7b-chat-hf` + - Environment variables: + - *CM_GIT_CHECKOUT_FOLDER*: `Llama-2-7b-chat-hf` + - *CM_MODEL_ZOO_ENV_KEY*: `LLAMA2` + - Workflow: + * `_stub.#` + - Environment variables: + - *CM_MODEL_ZOO_ENV_KEY*: `LLAMA2` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * **`_fp32`** (default) + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `fp32` + - *CM_ML_MODEL_PRECISION*: `fp32` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `fp32` + - Workflow: + * `_int8` + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `int8` + - *CM_ML_MODEL_PRECISION*: `int8` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `int8` + - Workflow: + * `_uint8` + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `uint8` + - *CM_ML_MODEL_PRECISION*: `uint8` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `uint8` + - Workflow: + +
+ + +#### Default variations + +`_fp32,_meta-llama/Llama-2-70b-chat-hf,_pytorch` + +#### Script flags mapped to environment +
+Click here to expand this section.
+
+* `--checkpoint=value` → `LLAMA2_CHECKPOINT_PATH=value`
+
+**Above CLI flags can be used in the Python CM API as follows:**
+
+```python
+r=cm.access({... , "checkpoint":...})
+```
+
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-llama2/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-llama2/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-llama2/_cm.json)*** + * get,ml-model,huggingface,zoo,_clone-repo + * Enable this dependency only if all ENV vars are set:
+`{'CM_TMP_REQUIRE_DOWNLOAD': ['yes']}` + * CM names: `--adr.['hf-zoo']...` + - CM script: [get-ml-model-huggingface-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-huggingface-zoo) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-llama2/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-llama2/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-llama2/_cm.json) + +___ +### Script output +`cmr "get raw ml-model language-processing llama2 llama2-70b text-summarization [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_ML_MODEL_*` +* `LLAMA2_CHECKPOINT_PATH` +#### New environment keys auto-detected from customize + +* `CM_ML_MODEL_PATH` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-mobilenet.md b/docs/AI-ML-models/get-ml-model-mobilenet.md new file mode 100644 index 0000000000..94f71e697e --- /dev/null +++ b/docs/AI-ML-models/get-ml-model-mobilenet.md @@ -0,0 +1,470 @@ +Automatically generated README for this automation recipe: **get-ml-model-mobilenet** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-mobilenet,ce46675a3ab249e4) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-mobilenet)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,ml-model,mobilenet,raw,ml-model-mobilenet,image-classification* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get ml-model mobilenet raw ml-model-mobilenet image-classification" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,ml-model,mobilenet,raw,ml-model-mobilenet,image-classification` + +`cm run script --tags=get,ml-model,mobilenet,raw,ml-model-mobilenet,image-classification[,variations] ` + +*or* + +`cmr "get ml-model mobilenet raw ml-model-mobilenet image-classification"` + +`cmr "get ml-model mobilenet raw ml-model-mobilenet image-classification [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,ml-model,mobilenet,raw,ml-model-mobilenet,image-classification',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,ml-model,mobilenet,raw,ml-model-mobilenet,image-classification"``` + +#### Run this script via Docker (beta) + +`cm docker script "get ml-model mobilenet raw ml-model-mobilenet image-classification[variations]" ` + +___ +### Customization + + +#### Variations + + * *Internal group (variations should not be selected manually)* +
+ Click here to expand this section. + + * `_quantized_` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_NAME_SUFFIX*: `_quant` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `yes` + - Workflow: + * `_tf,from.google,v2,quantized_` + - Environment variables: + - *CM_PACKAGE_URL*: `https://storage.googleapis.com/mobilenet_v2/checkpoints/<<>>_v2_<<>>_<<>>.tgz` + - *CM_ML_MODEL_WEIGHTS_FILE*: `<<>>_v2_<<>>_<<>>.ckpt.data-00000-of-00001` + - *CM_ML_MODEL_FILE*: `model.tflite` + - *CM_EXTRACT_FOLDER*: `v2_<<>>_<<>>` + - *CM_UNTAR*: `yes` + - Workflow: + +
+ + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_onnx,fp32,v1` + - Environment variables: + - *CM_ML_MODEL_NORMALIZE_DATA*: `yes` + - *CM_ML_MODEL_SUBTRACT_MEANS*: `no` + - *CM_ML_MODEL_VER*: `1_1.0_224` + - *CM_ML_MODEL_INPUT_LAYER_NAME*: `input:0` + - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `MobilenetV1/Predictions/Reshape_1:0` + - Workflow: + * `_onnx,int8,v1` + - Environment variables: + - *CM_ML_MODEL_NORMALIZE_DATA*: `no` + - *CM_ML_MODEL_SUBTRACT_MEANS*: `yes` + - *CM_ML_MODEL_GIVEN_CHANNEL_MEANS*: `128.0 128.0 128.0` + - *CM_ML_MODEL_VER*: `1_1.0_224_quant` + - *CM_ML_MODEL_INPUT_LAYER_NAME*: `0` + - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `169` + - *CM_PACKAGE_URL*: `https://zenodo.org/record/3353417/files/Quantized%20MobileNet.zip` + - *CM_ML_MODEL_FILE*: `mobilenet_sym_no_bn.onnx` + - *CM_UNZIP*: `yes` + - Workflow: + * `_onnx,opset-11,fp32,v1` + - Environment variables: + - *CM_PACKAGE_URL*: `https://zenodo.org/record/4735651/files/mobilenet_v1_1.0_224.onnx` + - Workflow: + * `_onnx,opset-8,fp32,v1` + - Environment variables: + - *CM_PACKAGE_URL*: `https://zenodo.org/record/3157894/files/mobilenet_v1_1.0_224.onnx` + - Workflow: + * `_tf,fp32,v1,resolution-224,multiplier-1.0` + - Environment variables: + - *CM_ML_MODEL_ACCURACY*: `71.676` + - Workflow: + * `_tf,from.google,v1` + - Environment variables: + - *CM_PACKAGE_URL*: `http://download.tensorflow.org/models/mobilenet_v1_2018_08_02/mobilenet_v1_<<>>_<<>><<>>.tgz` + - *CM_UNTAR*: `yes` + - Workflow: + * `_tf,from.google,v2,fp32` + - Environment variables: + - *CM_PACKAGE_URL*: `https://storage.googleapis.com/mobilenet_v2/checkpoints/mobilenet_v2_<<>>_<<>>.tgz` + - *CM_ML_MODEL_WEIGHTS_FILE*: `mobilenet_v2_<<>>_<<>>.ckpt.data-00000-of-00001` + - *CM_ML_MODEL_FILE*: `mobilenet_v2_<<>>_<<>>.tflite` + - *CM_UNTAR*: `yes` + - Workflow: + * `_tf,from.google,v3` + - Environment variables: + - *CM_PACKAGE_URL*: `https://storage.googleapis.com/mobilenet_v3/checkpoints/v3-<<>>_<<>>_<<>>_<<>>.tgz` + - *CM_EXTRACT_FOLDER*: 
`v3-<<>>_<<>>_<<>>_<<>>` + - *CM_ML_MODEL_FILE*: `v3-<<>>_<<>>_<<>>_<<>>.tflite` + - *CM_UNTAR*: `yes` + - Workflow: + * `_tf,from.zenodo,v1` + - Environment variables: + - *CM_PACKAGE_URL*: `https://zenodo.org/record/2269307/files/mobilenet_v1_<<>>_<<>><<>>.tgz` + - *CM_UNTAR*: `yes` + - Workflow: + * `_tf,int8,v1,resolution-224,multiplier-1.0` + - Environment variables: + - *CM_ML_MODEL_ACCURACY*: `70.762` + - Workflow: + * `_tf,v1` + - Environment variables: + - *CM_ML_MODEL_VER*: `1_<<>>_<<>><<>>_2018_08_02` + - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `MobilenetV1/Predictions/Reshape_1` + - *CM_ML_MODEL_WEIGHTS_FILE*: `mobilenet_v1_<<>>_<<>><<>>.ckpt.data-00000-of-00001` + - *CM_ML_MODEL_FILE*: `mobilenet_v1_<<>>_<<>><<>>.tflite` + - Workflow: + * `_tf,v1,fp32` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_NAME_PREFIX*: `` + - Workflow: + * `_tf,v1,int8` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_NAME_SUFFIX*: `_quant` + - Workflow: + * `_tf,v1,uint8` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_NAME_SUFFIX*: `_quant` + - Workflow: + * `_tf,v2,fp32` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_NAME_PREFIX*: `` + - *CM_ML_MODEL_VER*: `2_<<>>_<<>>` + - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `MobilenetV2/Predictions/Reshape_1` + - Workflow: + * `_tf,v2,int8` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_NAME_PREFIX*: `quantized` + - *CM_ML_MODEL_VER*: `2_<<>>_<<>>` + - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `MobilenetV2/Predictions/Softmax` + - Workflow: + * `_tf,v2,uint8` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_NAME_PREFIX*: `quantized` + - *CM_ML_MODEL_VER*: `2_<<>>_<<>>` + - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `MobilenetV2/Predictions/Softmax` + - Workflow: + * `_tf,v3` + - Environment variables: + - *CM_ML_MODEL_VER*: `3_<<>>_<<>>` + - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `MobilenetV3/Predictions/Softmax` + - Workflow: + * `_tflite` + - Workflow: + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * `_onnx` + - Environment variables: + - *CM_ML_MODEL_DATA_LAYOUT*: `NCHW` + - *CM_ML_MODEL_FRAMEWORK*: `onnx` + - Workflow: + * **`_tf`** (default) + - Environment variables: + - *CM_ML_MODEL_DATA_LAYOUT*: `NHWC` + - *CM_ML_MODEL_NORMALIZE_DATA*: `yes` + - *CM_ML_MODEL_SUBTRACT_MEANS*: `no` + - *CM_ML_MODEL_INPUT_LAYER_NAME*: `input` + - Workflow: + +
+ + + * Group "**kind**" +
+ Click here to expand this section. + + * `_large` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_KIND*: `large` + - Workflow: + * `_large-minimalistic` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_KIND*: `large-minimalistic` + - Workflow: + * `_small` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_KIND*: `small` + - Workflow: + * `_small-minimalistic` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_KIND*: `small-minimalistic` + - Workflow: + +
+ + + * Group "**multiplier**" +
+ Click here to expand this section. + + * `_multiplier-0.25` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_MULTIPLIER*: `0.25` + - *CM_ML_MODEL_MOBILENET_MULTIPLIER_PERCENTAGE*: `25` + - Workflow: + * `_multiplier-0.35` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_MULTIPLIER*: `0.35` + - *CM_ML_MODEL_MOBILENET_MULTIPLIER_PERCENTAGE*: `35` + - Workflow: + * `_multiplier-0.5` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_MULTIPLIER*: `0.5` + - *CM_ML_MODEL_MOBILENET_MULTIPLIER_PERCENTAGE*: `50` + - Workflow: + * `_multiplier-0.75` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_MULTIPLIER*: `0.75` + - *CM_ML_MODEL_MOBILENET_MULTIPLIER_PERCENTAGE*: `75` + - Workflow: + * `_multiplier-1.0` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_MULTIPLIER*: `1.0` + - *CM_ML_MODEL_MOBILENET_MULTIPLIER_PERCENTAGE*: `100` + - Workflow: + +
+ + + * Group "**opset-version**" +
+ Click here to expand this section. + + * `_opset-11` + - Environment variables: + - *CM_ML_MODEL_ONNX_OPSET*: `11` + - Workflow: + * `_opset-8` + - Environment variables: + - *CM_ML_MODEL_ONNX_OPSET*: `8` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * **`_fp32`** (default) + - Environment variables: + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_PRECISION*: `fp32` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_MOBILENET_PRECISION*: `float` + - Workflow: + * `_int8` + - Environment variables: + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int8` + - *CM_ML_MODEL_PRECISION*: `int8` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` + - *CM_ML_MODEL_MOBILENET_PRECISION*: `int8` + - Workflow: + * `_uint8` + - Environment variables: + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `uint8` + - *CM_ML_MODEL_PRECISION*: `uint8` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `uint8` + - *CM_ML_MODEL_MOBILENET_PRECISION*: `uint8` + - Workflow: + +
+ + + * Group "**resolution**" +
+ Click here to expand this section. + + * `_resolution-128` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_RESOLUTION*: `128` + - *CM_ML_MODEL_IMAGE_HEIGHT*: `128` + - *CM_ML_MODEL_IMAGE_WIDTH*: `128` + - *CM_DATASET_PREPROCESSED_IMAGENET_DEP_TAGS*: `_resolution.128` + - Workflow: + * `_resolution-160` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_RESOLUTION*: `160` + - *CM_ML_MODEL_IMAGE_HEIGHT*: `160` + - *CM_ML_MODEL_IMAGE_WIDTH*: `160` + - *CM_DATASET_PREPROCESSED_IMAGENET_DEP_TAGS*: `_resolution.160` + - Workflow: + * `_resolution-192` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_RESOLUTION*: `192` + - *CM_ML_MODEL_IMAGE_HEIGHT*: `192` + - *CM_ML_MODEL_IMAGE_WIDTH*: `192` + - *CM_DATASET_PREPROCESSED_IMAGENET_DEP_TAGS*: `_resolution.192` + - Workflow: + * `_resolution-224` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_RESOLUTION*: `224` + - *CM_ML_MODEL_IMAGE_HEIGHT*: `224` + - *CM_ML_MODEL_IMAGE_WIDTH*: `224` + - *CM_DATASET_PREPROCESSED_IMAGENET_DEP_TAGS*: `_resolution.224` + - Workflow: + +
+ + + * Group "**source**" +
+ Click here to expand this section. + + * `_from.google` + - Environment variables: + - *CM_DOWNLOAD_SOURCE*: `google` + - Workflow: + * `_from.zenodo` + - Environment variables: + - *CM_DOWNLOAD_SOURCE*: `zenodo` + - Workflow: + +
+ + + * Group "**version**" +
+ Click here to expand this section. + + * `_v1` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_VERSION*: `1` + - *CM_ML_MODEL_FULL_NAME*: `mobilenet-v1-precision_<<>>-<<>>-<<>>` + - Workflow: + * `_v2` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_VERSION*: `2` + - *CM_ML_MODEL_VER*: `2` + - *CM_ML_MODEL_FULL_NAME*: `mobilenet-v2-precision_<<>>-<<>>-<<>>` + - Workflow: + * **`_v3`** (default) + - Environment variables: + - *CM_ML_MODEL_MOBILENET_VERSION*: `3` + - *CM_ML_MODEL_VER*: `3` + - *CM_ML_MODEL_FULL_NAME*: `mobilenet-v3-precision_<<>>-<<>>-<<>>` + - Workflow: + +
+ + +#### Default variations + +`_fp32,_tf,_v3` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_ML_MODEL: `mobilenet` +* CM_ML_MODEL_DATASET: `imagenet2012-val` +* CM_ML_MODEL_RETRAINING: `no` +* CM_ML_MODEL_WEIGHT_TRANSFORMATIONS: `no` +* CM_ML_MODEL_INPUTS_DATA_TYPE: `fp32` +* CM_ML_MODEL_WEIGHTS_DATA_TYPE: `fp32` +* CM_ML_MODEL_MOBILENET_NAME_SUFFIX: `` + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-mobilenet/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-mobilenet/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-mobilenet/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-mobilenet/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-mobilenet/_cm.json) + +___ +### Script output +`cmr "get ml-model mobilenet raw ml-model-mobilenet image-classification [,variations]" -j` +#### New environment keys (filter) + +* `CM_DATASET_PREPROCESSED_IMAGENET_DEP_TAGS` +* `CM_ML_MODEL_*` +#### New environment keys auto-detected from customize + +* `CM_ML_MODEL_FILE` +* `CM_ML_MODEL_FILE_WITH_PATH` +* `CM_ML_MODEL_PATH` +* `CM_ML_MODEL_STARTING_WEIGHTS_FILENAME` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-neuralmagic-zoo.md b/docs/AI-ML-models/get-ml-model-neuralmagic-zoo.md new file mode 100644 index 0000000000..90b5c47318 --- /dev/null +++ b/docs/AI-ML-models/get-ml-model-neuralmagic-zoo.md @@ -0,0 +1,335 @@ +Automatically generated README for this automation recipe: **get-ml-model-neuralmagic-zoo** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-neuralmagic-zoo,adbb3f2525a14f97) ]* + +--- 
+#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-neuralmagic-zoo)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,ml-model,model,zoo,deepsparse,model-zoo,sparse-zoo,neuralmagic,neural-magic* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get ml-model model zoo deepsparse model-zoo sparse-zoo neuralmagic neural-magic" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,ml-model,model,zoo,deepsparse,model-zoo,sparse-zoo,neuralmagic,neural-magic` + +`cm run script --tags=get,ml-model,model,zoo,deepsparse,model-zoo,sparse-zoo,neuralmagic,neural-magic[,variations] ` + +*or* + +`cmr "get ml-model model zoo deepsparse model-zoo sparse-zoo neuralmagic neural-magic"` + +`cmr "get ml-model model zoo deepsparse model-zoo sparse-zoo neuralmagic neural-magic [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,ml-model,model,zoo,deepsparse,model-zoo,sparse-zoo,neuralmagic,neural-magic',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,ml-model,model,zoo,deepsparse,model-zoo,sparse-zoo,neuralmagic,neural-magic"``` + +#### Run this script via Docker (beta) + +`cm docker script "get ml-model model zoo deepsparse model-zoo sparse-zoo neuralmagic neural-magic[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_bert-base-pruned90-none` + - Aliases: `_model-stub.zoo:nlp/question_answering/bert-base/pytorch/huggingface/squad/pruned90-none` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/bert-base/pytorch/huggingface/squad/pruned90-none` + - *CM_ML_MODEL_FULL_NAME*: `bert-base-pruned90-none-bert-99` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/bert-base-uncased` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `unstructured pruning` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_RETRAINING*: `no` + - Workflow: + * `_bert-base-pruned95_obs_quant-none` + - Aliases: `_model-stub.zoo:nlp/question_answering/bert-base/pytorch/huggingface/squad/pruned95_obs_quant-none` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/bert-base/pytorch/huggingface/squad/pruned95_obs_quant-none` + - *CM_ML_MODEL_FULL_NAME*: `bert-base-pruned95_obs_quant-none-bert-99` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/bert-base-uncased` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, unstructured pruning` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int64` + - *CM_ML_MODEL_RETRAINING*: `yes` + - Workflow: + * `_bert-base_cased-pruned90-none` + - Aliases: `_model-stub.zoo:nlp/question_answering/bert-base_cased/pytorch/huggingface/squad/pruned90-none` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/bert-base_cased/pytorch/huggingface/squad/pruned90-none` + - *CM_ML_MODEL_FULL_NAME*: `bert-base_cased-pruned90-none-bert-99` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/bert-base-cased` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `unstructured pruning` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_RETRAINING*: `no` + - Workflow: + * `_bert-large-base-none` 
+ - Aliases: `_model-stub.zoo:nlp/question_answering/bert-large/pytorch/huggingface/squad/base-none` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/bert-large/pytorch/huggingface/squad/base-none` + - *CM_ML_MODEL_FULL_NAME*: `bert-large-base-none-bert-99` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/bert-large-uncased` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `unstructured pruning` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_RETRAINING*: `no` + - Workflow: + * `_bert-large-pruned80_quant-none-vnni` + - Aliases: `_model-stub.zoo:nlp/question_answering/bert-large/pytorch/huggingface/squad/pruned80_quant-none-vnni` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/bert-large/pytorch/huggingface/squad/pruned80_quant-none-vnni` + - *CM_ML_MODEL_FULL_NAME*: `bert-large-pruned80_quant-none-vnni-bert-99` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/bert-large-uncased` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, unstructured pruning` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int64` + - *CM_ML_MODEL_RETRAINING*: `no` + - Workflow: + * `_mobilebert-14layer_pruned50-none-vnni` + - Aliases: `_model-stub.zoo:nlp/question_answering/mobilebert-none/pytorch/huggingface/squad/14layer_pruned50-none-vnni` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/mobilebert-none/pytorch/huggingface/squad/14layer_pruned50-none-vnni` + - *CM_ML_MODEL_FULL_NAME*: `mobilebert-14layer_pruned50-none-vnni-bert-99` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://storage.googleapis.com/cloud-tpu-checkpoints/mobilebert/uncased_L-24_H-128_B-512_A-4_F-4_OPT.tar.gz` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `unstructured pruning` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_RETRAINING*: `no` + 
- Workflow: + * `_mobilebert-14layer_pruned50_quant-none-vnni` + - Aliases: `_model-stub.zoo:nlp/question_answering/mobilebert-none/pytorch/huggingface/squad/14layer_pruned50_quant-none-vnni` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/mobilebert-none/pytorch/huggingface/squad/14layer_pruned50_quant-none-vnni` + - *CM_ML_MODEL_FULL_NAME*: `mobilebert-14layer_pruned50_quant-none-vnni-bert-99` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://storage.googleapis.com/cloud-tpu-checkpoints/mobilebert/uncased_L-24_H-128_B-512_A-4_F-4_OPT.tar.gz` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, unstructured pruning` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int64` + - *CM_ML_MODEL_RETRAINING*: `yes` + - Workflow: + * `_mobilebert-base_quant-none` + - Aliases: `_model-stub.zoo:nlp/question_answering/mobilebert-none/pytorch/huggingface/squad/base_quant-none` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/mobilebert-none/pytorch/huggingface/squad/base_quant-none` + - *CM_ML_MODEL_FULL_NAME*: `mobilebert-base_quant-none-bert-99` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://storage.googleapis.com/cloud-tpu-checkpoints/mobilebert/uncased_L-24_H-128_B-512_A-4_F-4_OPT.tar.gz` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, unstructured pruning` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int64` + - *CM_ML_MODEL_RETRAINING*: `yes` + - Workflow: + * `_mobilebert-none-base-none` + - Aliases: `_model-stub.zoo:nlp/question_answering/mobilebert-none/pytorch/huggingface/squad/base-none` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/mobilebert-none/pytorch/huggingface/squad/base-none` + - *CM_ML_MODEL_FULL_NAME*: `mobilebert-none-base-none-bert-99` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: 
`https://storage.googleapis.com/cloud-tpu-checkpoints/mobilebert/uncased_L-24_H-128_B-512_A-4_F-4_OPT.tar.gz` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `unstructured pruning` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_RETRAINING*: `no` + - Workflow: + * `_model-stub.#` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `#` + - Workflow: + * `_obert-base-pruned90-none` + - Aliases: `_model-stub.zoo:nlp/question_answering/obert-base/pytorch/huggingface/squad/pruned90-none` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/obert-base/pytorch/huggingface/squad/pruned90-none` + - *CM_ML_MODEL_FULL_NAME*: `obert-base-pruned90-none-bert-99` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/bert-large-uncased` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `unstructured pruning` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_RETRAINING*: `no` + - Workflow: + * `_obert-large-base-none` + - Aliases: `_model-stub.zoo:nlp/question_answering/obert-large/pytorch/huggingface/squad/base-none` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/obert-large/pytorch/huggingface/squad/base-none` + - *CM_ML_MODEL_FULL_NAME*: `obert-large-base-none-bert-99` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/bert-large-uncased` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `unstructured pruning` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_RETRAINING*: `no` + - Workflow: + * `_obert-large-pruned95-none-vnni` + - Aliases: `_model-stub.zoo:nlp/question_answering/obert-large/pytorch/huggingface/squad/pruned95-none-vnni` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/obert-large/pytorch/huggingface/squad/pruned95-none-vnni` + - *CM_ML_MODEL_FULL_NAME*: `obert-large-pruned95-none-vnni-bert-99` + - 
*CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/bert-large-uncased` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `unstructured pruning` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_RETRAINING*: `no` + - Workflow: + * `_obert-large-pruned95_quant-none-vnni` + - Aliases: `_model-stub.zoo:nlp/question_answering/obert-large/pytorch/huggingface/squad/pruned95_quant-none-vnni` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/obert-large/pytorch/huggingface/squad/pruned95_quant-none-vnni` + - *CM_ML_MODEL_FULL_NAME*: `obert-large-pruned95_quant-none-vnni-bert-99` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/bert-large-uncased` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, unstructured pruning` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int64` + - *CM_ML_MODEL_RETRAINING*: `yes` + - Workflow: + * `_obert-large-pruned97-none` + - Aliases: `_model-stub.zoo:nlp/question_answering/obert-large/pytorch/huggingface/squad/pruned97-none` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/obert-large/pytorch/huggingface/squad/pruned97-none` + - *CM_ML_MODEL_FULL_NAME*: `obert-large-pruned97-none-bert-99` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/bert-large-uncased` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `unstructured pruning` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_RETRAINING*: `no` + - Workflow: + * `_obert-large-pruned97-quant-none` + - Aliases: `_model-stub.zoo:nlp/question_answering/obert-large/pytorch/huggingface/squad/pruned97_quant-none` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/obert-large/pytorch/huggingface/squad/pruned97_quant-none` + - *CM_ML_MODEL_FULL_NAME*: `obert-large-pruned97-quant-none-bert-99` + - 
*CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/bert-large-uncased` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, unstructured pruning` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int64` + - *CM_ML_MODEL_RETRAINING*: `no` + - Workflow: + * `_oberta-base-pruned90-quant-none` + - Aliases: `_model-stub.zoo:nlp/question_answering/oberta-base/pytorch/huggingface/squad/pruned90_quant-none` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/oberta-base/pytorch/huggingface/squad/pruned90_quant-none` + - *CM_ML_MODEL_FULL_NAME*: `oberta-base-pruned90-quant-none-bert-99` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/roberta-base` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, unstructured pruning` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int64` + - *CM_ML_MODEL_RETRAINING*: `no` + - Workflow: + * `_roberta-base-pruned85-quant-none` + - Aliases: `_model-stub.zoo:nlp/question_answering/roberta-base/pytorch/huggingface/squad/pruned85_quant-none` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/roberta-base/pytorch/huggingface/squad/pruned85_quant-none` + - *CM_ML_MODEL_FULL_NAME*: `roberta-base-pruned85-quant-none-bert-99` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/roberta-base` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, unstructured pruning` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int64` + - *CM_ML_MODEL_RETRAINING*: `no` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+
+___
+### Dependencies on other CM scripts
+
+
+  1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-neuralmagic-zoo/_cm.json)***
+     * get,python3
+       * CM names: `--adr.['python3', 'python']...`
+       - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3)
+     * get,generic-python-lib,_package.protobuf
+       - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib)
+     * get,generic-python-lib,_sparsezoo
+       - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib)
+  1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-neuralmagic-zoo/customize.py)***
+  1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-neuralmagic-zoo/_cm.json)
+  1. ***Run native script if exists***
+     * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-neuralmagic-zoo/run.bat)
+     * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-neuralmagic-zoo/run.sh)
+  1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-neuralmagic-zoo/_cm.json)
+  1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-neuralmagic-zoo/customize.py)***
+  1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-neuralmagic-zoo/_cm.json) + +___ +### Script output +`cmr "get ml-model model zoo deepsparse model-zoo sparse-zoo neuralmagic neural-magic [,variations]" -j` +#### New environment keys (filter) + +* `CM_GET_DEPENDENT_CACHED_PATH` +* `CM_MLPERF_CUSTOM_MODEL_PATH` +* `CM_ML_MODEL*` +* `CM_MODEL_ZOO_STUB` +#### New environment keys auto-detected from customize + +* `CM_GET_DEPENDENT_CACHED_PATH` +* `CM_MLPERF_CUSTOM_MODEL_PATH` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-resnet50.md b/docs/AI-ML-models/get-ml-model-resnet50.md new file mode 100644 index 0000000000..ff2c976a8c --- /dev/null +++ b/docs/AI-ML-models/get-ml-model-resnet50.md @@ -0,0 +1,356 @@ +Automatically generated README for this automation recipe: **get-ml-model-resnet50** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-resnet50,56203e4e998b4bc0) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-resnet50)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,raw,ml-model,resnet50,ml-model-resnet50,image-classification* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get raw ml-model resnet50 ml-model-resnet50 image-classification" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,raw,ml-model,resnet50,ml-model-resnet50,image-classification` + +`cm run script --tags=get,raw,ml-model,resnet50,ml-model-resnet50,image-classification[,variations] ` + +*or* + +`cmr "get raw ml-model resnet50 ml-model-resnet50 image-classification"` + +`cmr "get raw ml-model resnet50 ml-model-resnet50 image-classification [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,raw,ml-model,resnet50,ml-model-resnet50,image-classification',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,raw,ml-model,resnet50,ml-model-resnet50,image-classification"``` + +#### Run this script via Docker (beta) + +`cm docker script "get raw ml-model resnet50 ml-model-resnet50 image-classification[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_batch_size.#` + - Environment variables: + - *CM_ML_MODEL_BATCH_SIZE*: `#` + - Workflow: + * `_batch_size.1` + - Environment variables: + - *CM_ML_MODEL_BATCH_SIZE*: `1` + - Workflow: + * `_fix-input-shape` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * `_from-tf` + - Workflow: + * `_huggingface_default` + - Environment variables: + - *CM_PACKAGE_URL*: `https://huggingface.co/ctuning/mlperf-inference-resnet50-onnx-fp32-imagenet2012-v1.0/resolve/main/resnet50_v1.onnx` + - Workflow: + * `_ncnn,fp32` + - Environment variables: + - *CM_PACKAGE_URL*: `https://zenodo.org/record/8073420/files/resnet50_v1.bin?download=1` + - Workflow: + 1. ***Read "post_deps" on other CM scripts*** + * download-and-extract,_url.https://zenodo.org/record/8073420/files/resnet50_v1.param?download= + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + * `_onnx,from-tf` + - Environment variables: + - *CM_ML_MODEL_DATA_LAYOUT*: `NHWC` + - *CM_ML_MODEL_FRAMEWORK*: `onnx` + - *CM_ML_MODEL_INPUT_LAYERS*: `input_tensor` + - *CM_ML_MODEL_INPUT_LAYER_NAME*: `input_tensor` + - *CM_ML_MODEL_INPUT_SHAPES*: `\"input_tensor\": (BATCH_SIZE, 224, 224, 3)` + - *CM_ML_MODEL_OUTPUT_LAYERS*: `softmax_tensor` + - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `softmax_tensor` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/2535873/files/resnet50_v1.pb` + - Workflow: + * `_onnx,from-tf,fp32` + - Environment variables: + - *CM_DOWNLOAD_FILENAME*: `resnet50_v1_modified.onnx` + - *CM_PACKAGE_URL*: `https://drive.google.com/uc?id=15wZ_8Vt12cb10IEBsln8wksD1zGwlbOM` + - Workflow: + * `_onnx,opset-11` + - Environment variables: + - *CM_PACKAGE_URL*: `https://zenodo.org/record/4735647/files/resnet50_v1.onnx` + - Workflow: + * 
`_onnx,opset-8` + - Environment variables: + - *CM_PACKAGE_URL*: `https://zenodo.org/record/2592612/files/resnet50_v1.onnx` + - Workflow: + * `_pytorch,fp32` + - Environment variables: + - *CM_PACKAGE_URL*: `https://zenodo.org/record/4588417/files/resnet50-19c8e357.pth` + - Workflow: + * `_pytorch,int8` + - Environment variables: + - *CM_PACKAGE_URL*: `https://zenodo.org/record/4589637/files/resnet50_INT8bit_quantized.pt` + - Workflow: + * `_tensorflow,fix-input-shape` + - Environment variables: + - *CM_ML_MODEL_TF_FIX_INPUT_SHAPE*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_package.tensorflow + * CM names: `--adr.['tensorflow']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_tflite,argmax` + - Environment variables: + - *CM_DAE_EXTRACT_DOWNLOADED*: `yes` + - *CM_DOWNLOAD_FINAL_ENV_NAME*: `` + - *CM_EXTRACT_FINAL_ENV_NAME*: `CM_ML_MODEL_FILE_WITH_PATH` + - *CM_ML_MODEL_FILE*: `resnet50_v1.tflite` + - *CM_ML_MODEL_INPUT_SHAPES*: `\"input_tensor 2\": (BATCH_SIZE, 224, 224, 3)` + - *CM_PACKAGE_URL*: `https://www.dropbox.com/s/cvv2zlfo80h54uz/resnet50_v1.tflite.gz?dl=1` + - Workflow: + * `_tflite,int8,no-argmax` + - Environment variables: + - *CM_DOWNLOAD_FINAL_ENV_NAME*: `CM_ML_MODEL_FILE_WITH_PATH` + - *CM_ML_MODEL_FILE*: `resnet50_quant_full_mlperf_edgetpu.tflite` + - *CM_ML_MODEL_INPUT_SHAPES*: `\"input_tensor 2\": (BATCH_SIZE, 224, 224, 3)` + - *CM_PACKAGE_URL*: `https://zenodo.org/record/8234946/files/resnet50_quant_full_mlperf_edgetpu.tflite?download=1` + - Workflow: + * `_tflite,no-argmax` + - Environment variables: + - *CM_ML_MODEL_FILE*: `resnet50_v1.no-argmax.tflite` + - *CM_ML_MODEL_INPUT_SHAPES*: `\"input_tensor 2\": (BATCH_SIZE, 224, 224, 3)` + - *CM_PACKAGE_URL*: `https://www.dropbox.com/s/vhuqo0wc39lky0a/resnet50_v1.no-argmax.tflite?dl=1` + - Workflow: + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * `_ncnn` + - Environment variables: + - *CM_ML_MODEL_FRAMEWORK*: `ncnn` + - Workflow: + * **`_onnx`** (default) + - Aliases: `_onnxruntime` + - Environment variables: + - *CM_ML_MODEL_DATA_LAYOUT*: `NCHW` + - *CM_ML_MODEL_FRAMEWORK*: `onnx` + - *CM_ML_MODEL_INPUT_LAYERS*: `input_tensor:0` + - *CM_ML_MODEL_INPUT_LAYER_NAME*: `input_tensor:0` + - *CM_ML_MODEL_INPUT_SHAPES*: `\"input_tensor:0\": (BATCH_SIZE, 3, 224, 224)` + - *CM_ML_MODEL_OUTPUT_LAYERS*: `softmax_tensor:0` + - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `softmax_tensor:0` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `<<>>` + - *CM_ML_MODEL_VER*: `1.5` + - Workflow: + * `_pytorch` + - Environment variables: + - *CM_ML_MODEL_DATA_LAYOUT*: `NCHW` + - *CM_ML_MODEL_FRAMEWORK*: `pytorch` + - *CM_ML_MODEL_GIVEN_CHANNEL_MEANS*: `?` + - *CM_ML_MODEL_INPUT_LAYER_NAME*: `input_tensor:0` + - *CM_ML_MODEL_INPUT_SHAPES*: `\"input_tensor:0\": [BATCH_SIZE, 3, 224, 224]` + - *CM_ML_MODEL_OUTPUT_LAYERS*: `output` + - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `?` + - *CM_ML_STARTING_WEIGHTS_FILENAME*: `<<>>` + - Workflow: + * `_tensorflow` + - Aliases: `_tf` + - Environment variables: + - *CM_ML_MODEL_ACCURACY*: `76.456` + - *CM_ML_MODEL_DATA_LAYOUT*: `NHWC` + - *CM_ML_MODEL_FRAMEWORK*: `tensorflow` + - *CM_ML_MODEL_GIVEN_CHANNEL_MEANS*: `123.68 116.78 103.94` + - *CM_ML_MODEL_INPUT_LAYERS*: `input_tensor` + - *CM_ML_MODEL_INPUT_LAYER_NAME*: `input_tensor` + - *CM_ML_MODEL_INPUT_SHAPES*: `\"input_tensor:0\": (BATCH_SIZE, 3, 224, 224)` + - *CM_ML_MODEL_NORMALIZE_DATA*: `0` + - *CM_ML_MODEL_OUTPUT_LAYERS*: `softmax_tensor` + - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `softmax_tensor` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `<<>>` + - *CM_ML_MODEL_SUBTRACT_MEANS*: `YES` + - *CM_PACKAGE_URL*: `https://zenodo.org/record/2535873/files/resnet50_v1.pb` + - Workflow: + * `_tflite` + - Environment variables: + - *CM_ML_MODEL_ACCURACY*: `76.456` + - *CM_ML_MODEL_DATA_LAYOUT*: `NHWC` + - *CM_ML_MODEL_FRAMEWORK*: 
`tflite` + - *CM_ML_MODEL_GIVEN_CHANNEL_MEANS*: `123.68 116.78 103.94` + - *CM_ML_MODEL_INPUT_LAYERS*: `input_tensor` + - *CM_ML_MODEL_INPUT_LAYER_NAME*: `input_tensor` + - *CM_ML_MODEL_INPUT_SHAPES*: `\"input_tensor 2\": (BATCH_SIZE, 224, 224, 3)` + - *CM_ML_MODEL_NORMALIZE_DATA*: `0` + - *CM_ML_MODEL_OUTPUT_LAYERS*: `softmax_tensor` + - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `softmax_tensor` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `<<>>` + - *CM_ML_MODEL_SUBTRACT_MEANS*: `YES` + - Workflow: + +
+ + + * Group "**model-output**" +
+ Click here to expand this section. + + * **`_argmax`** (default) + - Environment variables: + - *CM_ML_MODEL_OUTPUT_LAYER_ARGMAX*: `yes` + - Workflow: + * `_no-argmax` + - Environment variables: + - *CM_ML_MODEL_OUTPUT_LAYER_ARGMAX*: `no` + - Workflow: + +
+ + + * Group "**opset-version**" +
+ Click here to expand this section. + + * `_opset-11` + - Environment variables: + - *CM_ML_MODEL_ONNX_OPSET*: `11` + - Workflow: + * `_opset-8` + - Environment variables: + - *CM_ML_MODEL_ONNX_OPSET*: `8` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * **`_fp32`** (default) + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `fp32` + - *CM_ML_MODEL_PRECISION*: `fp32` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `fp32` + - Workflow: + * `_int8` + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `int8` + - *CM_ML_MODEL_PRECISION*: `int8` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `int8` + - Workflow: + * `_uint8` + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `uint8` + - *CM_ML_MODEL_PRECISION*: `uint8` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `uint8` + - Workflow: + +
+ + +#### Default variations + +`_argmax,_fp32,_onnx` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-resnet50/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-resnet50/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-resnet50/_cm.json)*** + * download-and-extract + * CM names: `--adr.['model-downloader']...` + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + 1. ***Run native script if exists*** + * [run-fix-input.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-resnet50/run-fix-input.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-resnet50/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-resnet50/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-resnet50/_cm.json) + +___ +### Script output +`cmr "get raw ml-model resnet50 ml-model-resnet50 image-classification [,variations]" -j` +#### New environment keys (filter) + +* `CM_ML_MODEL_*` +#### New environment keys auto-detected from customize + +* `CM_ML_MODEL_FILE` +* `CM_ML_MODEL_FILE_WITH_PATH` +* `CM_ML_MODEL_STARTING_FILE_PATH` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-retinanet-nvidia.md b/docs/AI-ML-models/get-ml-model-retinanet-nvidia.md new file mode 100644 index 0000000000..5fba8e6686 --- /dev/null +++ b/docs/AI-ML-models/get-ml-model-retinanet-nvidia.md @@ -0,0 +1,172 @@ +Automatically generated README for this automation recipe: **get-ml-model-retinanet-nvidia** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-retinanet-nvidia,f059d249fac843ba) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet-nvidia)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,ml-model,nvidia-retinanet,nvidia* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get ml-model nvidia-retinanet nvidia" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,ml-model,nvidia-retinanet,nvidia` + +`cm run script --tags=get,ml-model,nvidia-retinanet,nvidia[,variations] ` + +*or* + +`cmr "get ml-model nvidia-retinanet nvidia"` + +`cmr "get ml-model nvidia-retinanet nvidia [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,ml-model,nvidia-retinanet,nvidia',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,ml-model,nvidia-retinanet,nvidia"``` + +#### Run this script via Docker (beta) + +`cm docker script "get ml-model nvidia-retinanet nvidia[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_efficient-nms` + - Environment variables: + - *CM_NVIDIA_EFFICIENT_NMS*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_polygraphy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_TORCH_DEVICE: `cpu` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet-nvidia/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,mlperf,training,src,_nvidia-retinanet + - CM script: [get-mlperf-training-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-training-src) + * get,mlperf,inference,src + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,ml-model,retinanet,_pytorch,_fp32,_weights + - CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) + * get,generic-python-lib,_torch + * Enable this dependency only if all ENV vars are set:
+`{'CM_TORCH_DEVICE': 'cpu'}` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torchvision + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_mlperf_logging + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,cuda + * Enable this dependency only if all ENV vars are set:
+`{'CM_TORCH_DEVICE': 'cuda'}` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * get,generic-python-lib,_torch_cuda + * Enable this dependency only if all ENV vars are set:
+`{'CM_TORCH_DEVICE': 'cuda'}`
+       - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib)
+     * get,nvidia,mlperf,inference,common-code,-_custom
+       - CM script: [get-mlperf-inference-nvidia-common-code](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-nvidia-common-code)
+  1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet-nvidia/customize.py)***
+  1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet-nvidia/_cm.json)
+  1. ***Run native script if exists***
+     * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet-nvidia/run.sh)
+  1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet-nvidia/_cm.json)
+  1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet-nvidia/customize.py)***
+  1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet-nvidia/_cm.json) + +___ +### Script output +`cmr "get ml-model nvidia-retinanet nvidia [,variations]" -j` +#### New environment keys (filter) + +* `CM_ML_MODEL_*` +* `CM_NVIDIA_RETINANET_*` +#### New environment keys auto-detected from customize + +* `CM_ML_MODEL_ANCHOR_PATH` +* `CM_ML_MODEL_DYN_BATCHSIZE_PATH` +* `CM_NVIDIA_RETINANET_EFFICIENT_NMS_CONCAT_MODEL_WITH_PATH` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-retinanet.md b/docs/AI-ML-models/get-ml-model-retinanet.md new file mode 100644 index 0000000000..4df53983b8 --- /dev/null +++ b/docs/AI-ML-models/get-ml-model-retinanet.md @@ -0,0 +1,225 @@ +Automatically generated README for this automation recipe: **get-ml-model-retinanet** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-retinanet,427bc5665e4541c2) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,ml-model,raw,resnext50,retinanet,object-detection* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get ml-model raw resnext50 retinanet object-detection" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,ml-model,raw,resnext50,retinanet,object-detection` + +`cm run script --tags=get,ml-model,raw,resnext50,retinanet,object-detection[,variations] ` + +*or* + +`cmr "get ml-model raw resnext50 retinanet object-detection"` + +`cmr "get ml-model raw resnext50 retinanet object-detection [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,ml-model,raw,resnext50,retinanet,object-detection',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,ml-model,raw,resnext50,retinanet,object-detection"``` + +#### Run this script via Docker (beta) + +`cm docker script "get ml-model raw resnext50 retinanet object-detection[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_no-nms` + - Environment variables: + - *CM_TMP_ML_MODEL_RETINANET_NO_NMS*: `yes` + - *CM_ML_MODEL_RETINANET_NO_NMS*: `yes` + - *CM_QAIC_PRINT_NODE_PRECISION_INFO*: `yes` + - Workflow: + * `_onnx,fp32` + - Environment variables: + - *CM_PACKAGE_URL*: `https://zenodo.org/record/6617879/files/resnext50_32x4d_fpn.onnx` + - *CM_DOWNLOAD_CHECKSUM*: `4544f4e56e0a4684215831cc937ea45c` + - *CM_ML_MODEL_ACCURACY*: `0.3757` + - Workflow: + * `_onnx,no-nms` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,python3 + * CM names: `--adr.['python, python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_package.onnx + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.onnxsim + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * download,file,_url.https://raw.githubusercontent.com/arjunsuresh/ck-qaic/main/package/model-onnx-mlperf-retinanet-no-nms/remove-nms-and-extract-priors.patch + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + * get,git,repo,_repo.https://github.com/mlcommons/training.git,_patch + * CM names: `--adr.['mlperf-training-src']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + * get,ml-model,retinanet,_pytorch,_fp32,_weights + * CM names: `--adr.['pytorch-weights']...` + - CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) + * get,generic-python-lib,_package.torch + * CM names: `--adr.['torch', 'pytorch']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_pytorch,fp32` + - Environment variables: + 
- *CM_PACKAGE_URL*: `https://zenodo.org/record/6617981/files/resnext50_32x4d_fpn.pth` + - *CM_ML_MODEL_ACCURACY*: `0.3755` + - Workflow: + * `_pytorch,fp32,weights` + - Environment variables: + - *CM_PACKAGE_URL*: `https://zenodo.org/record/6605272/files/retinanet_model_10.zip?download=1` + - *CM_UNZIP*: `yes` + - *CM_ML_MODEL_FILE*: `retinanet_model_10.pth` + - *CM_ML_MODEL_ACCURACY*: `0.3755` + - Workflow: + * `_weights` + - Environment variables: + - *CM_MODEL_WEIGHTS_FILE*: `yes` + - Workflow: + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * **`_onnx`** (default) + - Environment variables: + - *CM_ML_MODEL_DATA_LAYOUT*: `NCHW` + - *CM_ML_MODEL_FRAMEWORK*: `onnx` + - Workflow: + * `_pytorch` + - Environment variables: + - *CM_ML_MODEL_DATA_LAYOUT*: `NCHW` + - *CM_ML_MODEL_FRAMEWORK*: `pytorch` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * **`_fp32`** (default) + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `fp32` + - *CM_ML_MODEL_PRECISION*: `fp32` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `fp32` + - Workflow: + +
+ + +#### Default variations + +`_fp32,_onnx` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+
+___
+### Dependencies on other CM scripts
+
+
+  1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet/_cm.json)
+  1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet/customize.py)***
+  1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet/_cm.json)***
+     * download-and-extract
+       * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_TMP_ML_MODEL_RETINANET_NO_NMS': ['yes']}` + * CM names: `--adr.['dae']...` + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + 1. ***Run native script if exists*** + * [run-no-nms.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet/run-no-nms.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet/_cm.json) + +___ +### Script output +`cmr "get ml-model raw resnext50 retinanet object-detection [,variations]" -j` +#### New environment keys (filter) + +* `<<>>` +* `CM_ML_MODEL_*` +#### New environment keys auto-detected from customize + +* `CM_ML_MODEL_FILE` +* `CM_ML_MODEL_FILE_WITH_PATH` +* `CM_ML_MODEL_RETINANET_QAIC_NODE_PRECISION_INFO_FILE_PATH` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-rnnt.md b/docs/AI-ML-models/get-ml-model-rnnt.md new file mode 100644 index 0000000000..1d81ace52f --- /dev/null +++ b/docs/AI-ML-models/get-ml-model-rnnt.md @@ -0,0 +1,192 @@ +Automatically generated README for this automation recipe: **get-ml-model-rnnt** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-rnnt,8858f18b89774d28) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-rnnt)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,ml-model,rnnt,raw,librispeech,speech-recognition* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get ml-model rnnt raw librispeech speech-recognition" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,ml-model,rnnt,raw,librispeech,speech-recognition` + +`cm run script --tags=get,ml-model,rnnt,raw,librispeech,speech-recognition[,variations] ` + +*or* + +`cmr "get ml-model rnnt raw librispeech speech-recognition"` + +`cmr "get ml-model rnnt raw librispeech speech-recognition [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,ml-model,rnnt,raw,librispeech,speech-recognition',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,ml-model,rnnt,raw,librispeech,speech-recognition"``` + +#### Run this script via Docker (beta) + +`cm docker script "get ml-model rnnt raw librispeech speech-recognition[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_pytorch,fp32` + - Environment variables: + - *CM_ML_MODEL_ACCURACY*: `0.07452253714852645` + - *CM_PACKAGE_URL*: `https://zenodo.org/record/3662521/files/DistributedDataParallel_1576581068.9962234-epoch-100.pt?download=1` + - Workflow: + * `_pytorch,fp32,amazon-s3` + - Environment variables: + - *CM_PACKAGE_URL*: `https://mlperf-public.s3.us-west-2.amazonaws.com/DistributedDataParallel_1576581068.9962234-epoch-100.pt` + - Workflow: + * `_pytorch,fp32,zenodo` + - Environment variables: + - *CM_PACKAGE_URL*: `https://zenodo.org/record/3662521/files/DistributedDataParallel_1576581068.9962234-epoch-100.pt?download=1` + - Workflow: + * `_weights` + - Environment variables: + - *CM_MODEL_WEIGHTS_FILE*: `yes` + - Workflow: + +
+ + + * Group "**download-src**" +
+ Click here to expand this section. + + * **`_amazon-s3`** (default) + - Workflow: + * `_zenodo` + - Workflow: + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * **`_pytorch`** (default) + - Environment variables: + - *CM_ML_MODEL_FRAMEWORK*: `pytorch` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * **`_fp32`** (default) + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `fp32` + - *CM_ML_MODEL_PRECISION*: `fp32` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `fp32` + - Workflow: + +
+ + +#### Default variations + +`_amazon-s3,_fp32,_pytorch` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-rnnt/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-rnnt/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-rnnt/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-rnnt/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-rnnt/_cm.json) + +___ +### Script output +`cmr "get ml-model rnnt raw librispeech speech-recognition [,variations]" -j` +#### New environment keys (filter) + +* `CM_ML_MODEL_*` +#### New environment keys auto-detected from customize + +* `CM_ML_MODEL_FILE` +* `CM_ML_MODEL_FILE_WITH_PATH` +* `CM_ML_MODEL_PATH` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-stable-diffusion.md b/docs/AI-ML-models/get-ml-model-stable-diffusion.md new file mode 100644 index 0000000000..bf1378d78d --- /dev/null +++ b/docs/AI-ML-models/get-ml-model-stable-diffusion.md @@ -0,0 +1,256 @@ +Automatically generated README for this automation recipe: **get-ml-model-stable-diffusion** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-stable-diffusion,22c6516b2d4d4c23) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub 
directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-stable-diffusion)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,raw,ml-model,stable-diffusion,sdxl,text-to-image* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get raw ml-model stable-diffusion sdxl text-to-image" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,raw,ml-model,stable-diffusion,sdxl,text-to-image` + +`cm run script --tags=get,raw,ml-model,stable-diffusion,sdxl,text-to-image[,variations] [--input_flags]` + +*or* + +`cmr "get raw ml-model stable-diffusion sdxl text-to-image"` + +`cmr "get raw ml-model stable-diffusion sdxl text-to-image [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,raw,ml-model,stable-diffusion,sdxl,text-to-image',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,raw,ml-model,stable-diffusion,sdxl,text-to-image"``` + +#### Run this script via Docker (beta) + +`cm docker script "get raw ml-model stable-diffusion sdxl text-to-image[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_batch_size.#` + - Environment variables: + - *CM_ML_MODEL_BATCH_SIZE*: `#` + - Workflow: + * `_pytorch,fp16` + - Workflow: + * `_pytorch,fp32` + - Environment variables: + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0` + - Workflow: + * `_rclone,fp16` + - Environment variables: + - *CM_DOWNLOAD_URL*: `mlc-inference:mlcommons-inference-wg-public/stable_diffusion_fp16` + - Workflow: + * `_rclone,fp32` + - Environment variables: + - *CM_DOWNLOAD_URL*: `mlc-inference:mlcommons-inference-wg-public/stable_diffusion_fp32` + - Workflow: + +
+ + + * Group "**download-source**" +
+ Click here to expand this section. + + * `_huggingface` + - Workflow: + * **`_mlcommons`** (default) + - Workflow: + +
+ + + * Group "**download-tool**" +
+ Click here to expand this section. + + * `_git` + - Environment variables: + - *CM_DOWNLOAD_TOOL*: `git` + - Workflow: + * `_rclone` + - Environment variables: + - *CM_RCLONE_CONFIG_CMD*: `rclone config create mlc-inference s3 provider=Cloudflare access_key_id=f65ba5eef400db161ea49967de89f47b secret_access_key=fbea333914c292b854f14d3fe232bad6c5407bf0ab1bebf78833c2b359bdfd2b endpoint=https://c2686074cb2caf5cbaf6d134bdba8b47.r2.cloudflarestorage.com` + - *CM_DOWNLOAD_TOOL*: `rclone` + - Workflow: + * `_wget` + - Environment variables: + - *CM_DOWNLOAD_TOOL*: `wget` + - Workflow: + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * **`_pytorch`** (default) + - Environment variables: + - *CM_ML_MODEL_FRAMEWORK*: `pytorch` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * `_fp16` + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `fp16` + - *CM_ML_MODEL_PRECISION*: `fp16` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `fp16` + - Workflow: + * **`_fp32`** (default) + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `fp32` + - *CM_ML_MODEL_PRECISION*: `fp32` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `fp32` + - Workflow: + * `_int8` + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `int8` + - *CM_ML_MODEL_PRECISION*: `int8` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `int8` + - Workflow: + * `_uint8` + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `uint8` + - *CM_ML_MODEL_PRECISION*: `uint8` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `uint8` + - Workflow: + +
+ + +#### Default variations + +`_fp32,_mlcommons,_pytorch` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--checkpoint=value` → `SDXL_CHECKPOINT_PATH=value` +* `--download_path=value` → `CM_DOWNLOAD_PATH=value` +* `--to=value` → `CM_DOWNLOAD_PATH=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "checkpoint":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-stable-diffusion/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-stable-diffusion/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-stable-diffusion/_cm.json)*** + * get,ml-model,huggingface,zoo,_clone-repo,_model-stub.stabilityai/stable-diffusion-xl-base-1.0 + * Enable this dependency only if all ENV vars are set:
+`{'CM_TMP_REQUIRE_DOWNLOAD': ['yes'], 'CM_DOWNLOAD_TOOL': ['git']}` + * CM names: `--adr.['hf-zoo']...` + - CM script: [get-ml-model-huggingface-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-huggingface-zoo) + * download-and-extract + * Enable this dependency only if all ENV vars are set:
+`{'CM_TMP_REQUIRE_DOWNLOAD': ['yes'], 'CM_DOWNLOAD_TOOL': ['rclone']}` + * CM names: `--adr.['dae']...` + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-stable-diffusion/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-stable-diffusion/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-stable-diffusion/_cm.json) + +___ +### Script output +`cmr "get raw ml-model stable-diffusion sdxl text-to-image [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_ML_MODEL_*` +* `SDXL_CHECKPOINT_PATH` +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-models/get-ml-model-tiny-resnet.md b/docs/AI-ML-models/get-ml-model-tiny-resnet.md new file mode 100644 index 0000000000..b587964351 --- /dev/null +++ b/docs/AI-ML-models/get-ml-model-tiny-resnet.md @@ -0,0 +1,213 @@ +Automatically generated README for this automation recipe: **get-ml-model-tiny-resnet** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-tiny-resnet,dd5ec11c3f6e49eb) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-tiny-resnet)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to 
find and reuse this script (see in above meta description): *get,raw,ml-model,resnet,pretrained,tiny,model,ic,ml-model-tiny-resnet,image-classification* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get raw ml-model resnet pretrained tiny model ic ml-model-tiny-resnet image-classification" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,raw,ml-model,resnet,pretrained,tiny,model,ic,ml-model-tiny-resnet,image-classification` + +`cm run script --tags=get,raw,ml-model,resnet,pretrained,tiny,model,ic,ml-model-tiny-resnet,image-classification[,variations] ` + +*or* + +`cmr "get raw ml-model resnet pretrained tiny model ic ml-model-tiny-resnet image-classification"` + +`cmr "get raw ml-model resnet pretrained tiny model ic ml-model-tiny-resnet image-classification [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,raw,ml-model,resnet,pretrained,tiny,model,ic,ml-model-tiny-resnet,image-classification', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,raw,ml-model,resnet,pretrained,tiny,model,ic,ml-model-tiny-resnet,image-classification"``` + +#### Run this script via Docker (beta) + +`cm docker script "get raw ml-model resnet pretrained tiny model ic ml-model-tiny-resnet image-classification[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_batch_size.#` + - Environment variables: + - *CM_ML_MODEL_BATCH_SIZE*: `#` + - Workflow: + * `_tflite,int8` + - Environment variables: + - *CM_PACKAGE_URL*: `https://github.com/mlcommons/tiny/raw/master/benchmark/training/image_classification/trained_models/pretrainedResnet_quant.tflite` + - *CM_DOWNLOAD_CHECKSUM*: `2d6dd48722471313e4c4528249205ae3` + - Workflow: + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * `_onnx` + - Environment variables: + - *CM_TMP_ML_MODEL_TF2ONNX*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,python3 + * CM names: `--adr.['python,python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,tiny,model,resnet,_tflite + * CM names: `--adr.['tflite-resnet-model', 'dependent-model']...` + - CM script: [get-ml-model-tiny-resnet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-tiny-resnet) + * get,generic-python-lib,_package.tf2onnx + * CM names: `--adr.['tf2onnx']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * **`_tflite`** (default) + - Environment variables: + - *CM_ML_MODEL_ACCURACY*: `85` + - *CM_ML_MODEL_DATA_LAYOUT*: `NHWC` + - *CM_ML_MODEL_FRAMEWORK*: `tflite` + - *CM_ML_MODEL_GIVEN_CHANNEL_MEANS*: `` + - *CM_ML_MODEL_INPUT_LAYERS*: `` + - *CM_ML_MODEL_INPUT_LAYER_NAME*: `` + - *CM_ML_MODEL_INPUT_SHAPES*: `` + - *CM_ML_MODEL_NORMALIZE_DATA*: `0` + - *CM_ML_MODEL_OUTPUT_LAYERS*: `` + - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `<<>>` + - *CM_ML_MODEL_SUBTRACT_MEANS*: `YES` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * `_fp32` + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `fp32` + - *CM_ML_MODEL_PRECISION*: `fp32` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `fp32` + - Workflow: + * **`_int8`** (default) + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `int8` + - *CM_ML_MODEL_PRECISION*: `int8` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `int8` + - Workflow: + * `_uint8` + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `uint8` + - *CM_ML_MODEL_PRECISION*: `uint8` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `uint8` + - Workflow: + +
+ + +#### Default variations + +`_int8,_tflite` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-tiny-resnet/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-tiny-resnet/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-tiny-resnet/_cm.json)*** + * download-and-extract + * Enable this dependency only if all ENV vars are set:
+`{'CM_PACKAGE_URL': ['on']}` + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-tiny-resnet/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-tiny-resnet/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-tiny-resnet/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-tiny-resnet/_cm.json) + +___ +### Script output +`cmr "get raw ml-model resnet pretrained tiny model ic ml-model-tiny-resnet image-classification [,variations]" -j` +#### New environment keys (filter) + +* `CM_ML_MODEL_*` +#### New environment keys auto-detected from customize + +* `CM_ML_MODEL_FILE` +* `CM_ML_MODEL_FILE_WITH_PATH` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-using-imagenet-from-model-zoo.md b/docs/AI-ML-models/get-ml-model-using-imagenet-from-model-zoo.md new file mode 100644 index 0000000000..9d979bdb8e --- /dev/null +++ b/docs/AI-ML-models/get-ml-model-using-imagenet-from-model-zoo.md @@ -0,0 +1,147 @@ +Automatically generated README for this automation recipe: **get-ml-model-using-imagenet-from-model-zoo** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-using-imagenet-from-model-zoo,153e08828c4e45cc) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this 
script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-using-imagenet-from-model-zoo)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,ml-model,model-zoo,zoo,imagenet,image-classification* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get ml-model model-zoo zoo imagenet image-classification" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,ml-model,model-zoo,zoo,imagenet,image-classification` + +`cm run script --tags=get,ml-model,model-zoo,zoo,imagenet,image-classification[,variations] ` + +*or* + +`cmr "get ml-model model-zoo zoo imagenet image-classification"` + +`cmr "get ml-model model-zoo zoo imagenet image-classification [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,ml-model,model-zoo,zoo,imagenet,image-classification', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,ml-model,model-zoo,zoo,imagenet,image-classification"``` + +#### Run this script via Docker (beta) + +`cm docker script "get ml-model model-zoo zoo imagenet image-classification[variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**model-source**" +
+ Click here to expand this section. + + * `_model.#` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,ml-model,zoo,deepsparse,_model-stub.# + * CM names: `--adr.['neural-magic-zoo-downloader']...` + - CM script: [get-ml-model-neuralmagic-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-neuralmagic-zoo) + * `_model.resnet101-pytorch-base` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,ml-model,zoo,deepsparse,_model-stub.zoo:cv/classification/resnet_v1-101/pytorch/sparseml/imagenet/base-none + * CM names: `--adr.['neural-magic-zoo-downloader']...` + - CM script: [get-ml-model-neuralmagic-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-neuralmagic-zoo) + * `_model.resnet50-pruned95-uniform-quant` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,ml-model,zoo,deepsparse,_model-stub.zoo:cv/classification/resnet_v1-50/pytorch/sparseml/imagenet/pruned95_uniform_quant-none + * CM names: `--adr.['neural-magic-zoo-downloader']...` + - CM script: [get-ml-model-neuralmagic-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-neuralmagic-zoo) + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-using-imagenet-from-model-zoo/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-using-imagenet-from-model-zoo/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-using-imagenet-from-model-zoo/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-using-imagenet-from-model-zoo/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-using-imagenet-from-model-zoo/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-using-imagenet-from-model-zoo/_cm.json) + +___ +### Script output +`cmr "get ml-model model-zoo zoo imagenet image-classification [,variations]" -j` +#### New environment keys (filter) + +* `CM_ML_MODEL*` +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-models/get-tvm-model.md b/docs/AI-ML-models/get-tvm-model.md new file mode 100644 index 0000000000..61775fb9fa --- /dev/null +++ b/docs/AI-ML-models/get-tvm-model.md @@ -0,0 +1,288 @@ +Automatically generated README for this automation recipe: **get-tvm-model** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-tvm-model,c1b7b656b6224307) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* 
+ +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm-model)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,ml-model-tvm,tvm-model* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get ml-model-tvm tvm-model" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,ml-model-tvm,tvm-model` + +`cm run script --tags=get,ml-model-tvm,tvm-model[,variations] ` + +*or* + +`cmr "get ml-model-tvm tvm-model"` + +`cmr "get ml-model-tvm tvm-model [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,ml-model-tvm,tvm-model', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,ml-model-tvm,tvm-model"``` + +#### Run this script via Docker (beta) + +`cm docker script "get ml-model-tvm tvm-model[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_tune-model` + - Environment variables: + - *CM_TUNE_TVM_MODEL*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_xgboost + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_pandas + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_tornado + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + +
+ + + * Group "**batchsize**" +
+ Click here to expand this section. + + * `_batch_size.#` + - Environment variables: + - *CM_ML_MODEL_MAX_BATCH_SIZE*: `#` + - Workflow: + +
+ + + * Group "**frontend**" +
+ Click here to expand this section. + + * **`_onnx`** (default) + - Environment variables: + - *CM_TVM_FRONTEND_FRAMEWORK*: `onnx` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_onnx + * CM names: `--adr.['onnx']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_pytorch` + - Aliases: `_torch` + - Environment variables: + - *CM_TVM_FRONTEND_FRAMEWORK*: `pytorch` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_torch + * CM names: `--adr.['pytorch', 'torch']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torchvision + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_tensorflow` + - Aliases: `_tf` + - Environment variables: + - *CM_TVM_FRONTEND_FRAMEWORK*: `tensorflow` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_tensorflow + * CM names: `--adr.['tensorflow']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_tflite` + - Environment variables: + - *CM_TVM_FRONTEND_FRAMEWORK*: `tflite` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_tflite + * CM names: `--adr.['tflite']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + +
+ + + * Group "**model**" +
+ Click here to expand this section. + + * `_model.#` + - Environment variables: + - *CM_ML_MODEL*: `#` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * **`_fp32`** (default) + - Workflow: + * `_int8` + - Workflow: + * `_uint8` + - Workflow: + +
+ + + * Group "**runtime**" +
+ Click here to expand this section. + + * `_graph_executor` + - Environment variables: + - *CM_TVM_USE_VM*: `no` + - Workflow: + * **`_virtual_machine`** (default) + - Environment variables: + - *CM_TVM_USE_VM*: `yes` + - Workflow: + +
+ + +#### Default variations + +`_fp32,_onnx,_virtual_machine` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_ML_MODEL_MAX_BATCH_SIZE: `1` +* CM_TUNE_TVM_MODEL: `no` +* CM_TVM_USE_VM: `yes` +* CM_TVM_FRONTEND_FRAMEWORK: `onnx` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm-model/_cm.json)*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,tvm + * CM names: `--adr.['tvm']...` + - CM script: [get-tvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tvm) + * get,generic-python-lib,_decorator + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_psutil + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_scipy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_attrs + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm-model/customize.py)*** + 1. 
***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm-model/_cm.json)*** + * get,ml-model,raw + * CM names: `--adr.['original-model']...` + - CM script: [get-ml-model-3d-unet-kits19](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-3d-unet-kits19) + - CM script: [get-ml-model-bert-base-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-base-squad) + - CM script: [get-ml-model-bert-large-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-large-squad) + - CM script: [get-ml-model-dlrm-terabyte](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-dlrm-terabyte) + - CM script: [get-ml-model-efficientnet-lite](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-efficientnet-lite) + - CM script: [get-ml-model-gptj](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-gptj) + - CM script: [get-ml-model-huggingface-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-huggingface-zoo) + - CM script: [get-ml-model-llama2](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-llama2) + - CM script: [get-ml-model-mobilenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-mobilenet) + - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) + - CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) + - CM script: [get-ml-model-rnnt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-rnnt) + - CM script: [get-ml-model-stable-diffusion](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-stable-diffusion) + - CM script: [get-ml-model-tiny-resnet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-tiny-resnet) + 1. 
***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm-model/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm-model/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm-model/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm-model/_cm.json) + +___ +### Script output +`cmr "get ml-model-tvm tvm-model [,variations]" -j` +#### New environment keys (filter) + +* `CM_ML_MODEL_*` +* `CM_TUNE_TVM_*` +* `CM_TVM_*` +#### New environment keys auto-detected from customize + +* `CM_ML_MODEL_FILE` +* `CM_ML_MODEL_FILE_WITH_PATH` +* `CM_ML_MODEL_FRAMEWORK` +* `CM_ML_MODEL_INPUT_SHAPES` +* `CM_ML_MODEL_ORIGINAL_FILE_WITH_PATH` +* `CM_ML_MODEL_PATH` +* `CM_TUNE_TVM_MODEL` +* `CM_TVM_FRONTEND_FRAMEWORK` \ No newline at end of file diff --git a/docs/AI-ML-optimization/calibrate-model-for.qaic.md b/docs/AI-ML-optimization/calibrate-model-for.qaic.md new file mode 100644 index 0000000000..9441e45665 --- /dev/null +++ b/docs/AI-ML-optimization/calibrate-model-for.qaic.md @@ -0,0 +1,289 @@ +Automatically generated README for this automation recipe: **calibrate-model-for.qaic** + +Category: **AI/ML optimization** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=calibrate-model-for.qaic,817bad70df2f4e45) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/calibrate-model-for.qaic)* +* CM meta description 
for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *qaic,calibrate,profile,qaic-profile,qaic-calibrate* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "qaic calibrate profile qaic-profile qaic-calibrate" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=qaic,calibrate,profile,qaic-profile,qaic-calibrate` + +`cm run script --tags=qaic,calibrate,profile,qaic-profile,qaic-calibrate[,variations] ` + +*or* + +`cmr "qaic calibrate profile qaic-profile qaic-calibrate"` + +`cmr "qaic calibrate profile qaic-profile qaic-calibrate [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'qaic,calibrate,profile,qaic-profile,qaic-calibrate', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="qaic,calibrate,profile,qaic-profile,qaic-calibrate"``` + +#### Run this script via Docker (beta) + +`cm docker script "qaic calibrate profile qaic-profile qaic-calibrate[variations]" ` + +___ +### Customization + + +#### Variations + + * *Internal group (variations should not be selected manually)* +
+ Click here to expand this section. + + * `_bert_` + - Environment variables: + - *CM_QAIC_MODEL_NAME*: `bert-large` + - *CM_CREATE_INPUT_BATCH*: `no` + - Workflow: + +
+ + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_first.#` + - Workflow: + * `_resnet50,tf` + - Environment variables: + - *CM_QAIC_MODEL_TO_CONVERT*: `calibrate_resnet50_tf` + - Workflow: + +
+ + + * Group "**batch-size**" +
+ Click here to expand this section. + + * `_bs.#` + - Environment variables: + - *CM_QAIC_MODEL_BATCH_SIZE*: `#` + - *CM_CREATE_INPUT_BATCH*: `yes` + - Workflow: + * `_bs.1` + - Environment variables: + - *CM_QAIC_MODEL_BATCH_SIZE*: `1` + - *CM_CREATE_INPUT_BATCH*: `yes` + - Workflow: + +
+ + + * Group "**calib-dataset-filter-size**" +
+ Click here to expand this section. + + * `_filter-size.#` + - Workflow: + +
+ + + * Group "**calibration-option**" +
+ Click here to expand this section. + + * `_mlperf.option1` + - Workflow: + * `_mlperf.option2` + - Workflow: + +
+ + + * Group "**model**" +
+ Click here to expand this section. + + * `_bert-99` + - Environment variables: + - *CM_CALIBRATE_SQUAD*: `yes` + - *CM_QAIC_COMPILER_ARGS*: `` + - *CM_QAIC_COMPILER_PARAMS*: `-onnx-define-symbol=batch_size,1 -onnx-define-symbol=seg_length,<<>> -input-list-file=<<>> -num-histogram-bins=512 -profiling-threads=<<>>` + - *CM_QAIC_MODEL_TO_CONVERT*: `calibrate_bert_mlperf` + - Workflow: + * `_resnet50` + - Environment variables: + - *CM_QAIC_MODEL_NAME*: `resnet50` + - *CM_CALIBRATE_IMAGENET*: `yes` + - *CM_QAIC_COMPILER_ARGS*: `` + - *CM_QAIC_COMPILER_PARAMS*: `-output-node-name=ArgMax -profiling-threads=<<>>` + - *CM_QAIC_OUTPUT_NODE_NAME*: `-output-node-name=ArgMax` + - *CM_QAIC_MODEL_TO_CONVERT*: `calibrate_resnet50_tf` + - Workflow: + * `_retinanet` + - Environment variables: + - *CM_QAIC_MODEL_NAME*: `retinanet` + - *CM_CALIBRATE_OPENIMAGES*: `yes` + - *CM_QAIC_COMPILER_ARGS*: `` + - *CM_QAIC_COMPILER_PARAMS*: `-enable-channelwise -profiling-threads=<<>> -onnx-define-symbol=batch_size,<<>> -node-precision-info=<<>>` + - *CM_QAIC_MODEL_TO_CONVERT*: `calibrate_retinanet_no_nms_mlperf` + - Workflow: + +
+ + + * Group "**model-framework**" +
+ Click here to expand this section. + + * `_tf` + - Workflow: + +
+ + + * Group "**seq-length**" +
+ Click here to expand this section. + + * `_seq.#` + - Environment variables: + - *CM_DATASET_SQUAD_TOKENIZED_MAX_SEQ_LENGTH*: `#` + - Workflow: + * `_seq.384` + - Environment variables: + - *CM_DATASET_SQUAD_TOKENIZED_MAX_SEQ_LENGTH*: `#` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/calibrate-model-for.qaic/_cm.json)*** + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,qaic,apps,sdk + * CM names: `--adr.['qaic-apps-sdk']...` + - CM script: [get-qaic-apps-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-qaic-apps-sdk) + * get,preprocessed,dataset,_calibration,openimages,_for.retinanet.onnx,_NCHW,_fp32,_custom-annotations + * Enable this dependency only if all ENV vars are set:
+`{'CM_CALIBRATE_OPENIMAGES': ['yes']}` + * CM names: `--adr.['openimages-cal', 'preprocessed-dataset']...` + - CM script: [get-preprocessed-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-openimages) + * get,dataset,imagenet,preprocessed,_calibration,_for.resnet50,_float32,_rgb32 + * Enable this dependency only if all ENV vars are set:
+`{'CM_CALIBRATE_IMAGENET': ['yes']}` + * CM names: `--adr.['imagenet-cal', 'preprocessed-calibration-dataset']...` + - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) + * get,dataset,preprocessed,_calib1,squad,_pickle,_seq-length.384,_packed + * Enable this dependency only if all ENV vars are set:
+`{'CM_CALIBRATE_SQUAD': ['on']}` + * CM names: `--adr.['squad-cal', 'preprocessed-dataset']...` + - CM script: [get-preprocessed-dataset-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-squad) + * get,ml-model + * CM names: `--adr.['model-src']...` + - CM script: [get-ml-model-abtf-ssd-pytorch](https://github.com/mlcommons/cm4abtf/tree/master/script/get-ml-model-abtf-ssd-pytorch) + - CM script: [get-ml-model-3d-unet-kits19](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-3d-unet-kits19) + - CM script: [get-ml-model-bert-base-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-base-squad) + - CM script: [get-ml-model-bert-large-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-large-squad) + - CM script: [get-ml-model-dlrm-terabyte](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-dlrm-terabyte) + - CM script: [get-ml-model-efficientnet-lite](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-efficientnet-lite) + - CM script: [get-ml-model-gptj](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-gptj) + - CM script: [get-ml-model-huggingface-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-huggingface-zoo) + - CM script: [get-ml-model-llama2](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-llama2) + - CM script: [get-ml-model-mobilenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-mobilenet) + - CM script: [get-ml-model-neuralmagic-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-neuralmagic-zoo) + - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) + - CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) + - CM script: 
[get-ml-model-retinanet-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet-nvidia) + - CM script: [get-ml-model-rnnt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-rnnt) + - CM script: [get-ml-model-stable-diffusion](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-stable-diffusion) + - CM script: [get-ml-model-tiny-resnet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-tiny-resnet) + - CM script: [get-ml-model-using-imagenet-from-model-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-using-imagenet-from-model-zoo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/calibrate-model-for.qaic/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/calibrate-model-for.qaic/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/calibrate-model-for.qaic/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/calibrate-model-for.qaic/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/calibrate-model-for.qaic/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/calibrate-model-for.qaic/_cm.json) + +___ +### Script output +`cmr "qaic calibrate profile qaic-profile qaic-calibrate [,variations]" -j` +#### New environment keys (filter) + +* `CM_QAIC_MODEL_PROFILE_*` +#### New environment keys auto-detected from customize + +* `CM_QAIC_MODEL_PROFILE_WITH_PATH` \ No newline at end of file diff --git a/docs/AI-ML-optimization/compile-model-for.qaic.md b/docs/AI-ML-optimization/compile-model-for.qaic.md new file mode 100644 index 0000000000..686f6dec26 --- /dev/null +++ b/docs/AI-ML-optimization/compile-model-for.qaic.md @@ -0,0 +1,438 @@ +Automatically generated README for this automation recipe: **compile-model-for.qaic** + +Category: **AI/ML optimization** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=compile-model-for.qaic,3f0f43b5d0304d1c) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-model-for.qaic)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *qaic,compile,model,model-compile,qaic-compile* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "qaic compile model model-compile qaic-compile" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=qaic,compile,model,model-compile,qaic-compile` + +`cm run script --tags=qaic,compile,model,model-compile,qaic-compile[,variations] [--input_flags]` + +*or* + +`cmr "qaic compile model model-compile qaic-compile"` + +`cmr "qaic compile model model-compile qaic-compile [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'qaic,compile,model,model-compile,qaic-compile', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="qaic,compile,model,model-compile,qaic-compile"``` + +#### Run this script via Docker (beta) + +`cm docker script "qaic compile model model-compile qaic-compile[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_bert-99` + - Environment variables: + - *CM_COMPILE_BERT*: `on` + - *CM_QAIC_MODEL_TO_CONVERT*: `calibrate_bert_mlperf` + - *CM_QAIC_MODEL_COMPILER_PARAMS_BASE*: `-aic-hw -aic-hw-version=2.0 -execute-nodes-in-fp16=Add,Div,Erf,Softmax -quantization-schema=symmetric_with_uint8 -quantization-precision=Int8 -quantization-precision-bias=Int32 -vvv -compile-only -onnx-define-symbol=batch_size,1 -onnx-define-symbol=seg_length,384 -multicast-weights -combine-inputs=false -combine-outputs=false` + - *CM_QAIC_MODEL_COMPILER_ARGS*: `` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * calibrate,qaic,_bert-99 + * CM names: `--adr.['bert-profile', 'qaic-profile']...` + - CM script: [calibrate-model-for.qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/calibrate-model-for.qaic) + * `_bert-99,offline` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS*: `-allocator-dealloc-delay=2 -size-split-granularity=1536 -vtcm-working-set-limit-ratio=1` + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=1 -mos=1 -ols=2` + - Workflow: + * `_bert-99,offline,nsp.14` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=1 -mos=1 -ols=3` + - Workflow: + * `_bert-99,offline,nsp.16` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=1 -mos=1 -ols=2` + - Workflow: + * `_bert-99,server` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS*: `-allocator-dealloc-delay=2 -size-split-granularity=1536 -vtcm-working-set-limit-ratio=1` + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=1 -mos=1 -ols=3` + - Workflow: + * `_bert-99,server,nsp.14` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=1 -mos=1 -ols=3` + - Workflow: + * `_bert-99,singlestream` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS*: `` + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=8 -mos=8 -ols=1` + - Workflow: + * 
`_bert-99,singlestream,nsp.14` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=8 -mos=8 -ols=1` + - Workflow: + * `_bert-99.9` + - Environment variables: + - *CM_COMPILE_BERT*: `on` + - *CM_QAIC_MODEL_TO_CONVERT*: `bert_mlperf` + - *CM_QAIC_MODEL_COMPILER_PARAMS_BASE*: `-aic-hw -aic-hw-version=2.0 -convert-to-fp16 -vvv -compile-only -onnx-define-symbol=batch_size,1 -onnx-define-symbol=seg_length,384 -combine-inputs=false -combine-outputs=false` + - *CM_QAIC_MODEL_COMPILER_ARGS*: `` + - Workflow: + * `_bert-99.9,offline` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=2 -mos=1 -ols=2` + - Workflow: + * `_bert-99.9,offline,nsp.14` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=2 -mos=1 -ols=2` + - Workflow: + * `_bert-99.9,offline,nsp.16` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=2 -mos=1 -ols=2` + - Workflow: + * `_bert-99.9,server` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=2` + - Workflow: + * `_bert-99.9,server,nsp.14` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=2` + - Workflow: + * `_resnet50` + - Environment variables: + - *CM_COMPILE_RESNET*: `on` + - *CM_QAIC_MODEL_TO_CONVERT*: `compile_resnet50_tf` + - *CM_QAIC_MODEL_COMPILER_PARAMS_BASE*: `-aic-hw -aic-hw-version=2.0 -quantization-schema=symmetric_with_uint8 -quantization-precision=Int8 -output-node-name=ArgMax -vvv -compile-only -use-producer-dma=1` + - Workflow: + * `_resnet50,multistream` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS*: `` + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=4 -mos=1 -ols=1` + - Workflow: + * `_resnet50,multistream,nsp.14` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=4` + - Workflow: + * `_resnet50,offline` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS*: `-sdp-cluster-sizes=2,2 
-multicast-weights` + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=4 -mos=1,2 -ols=4` + - Workflow: + * `_resnet50,offline,nsp.14` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=4 -mos=1,2 -ols=4` + - Workflow: + * `_resnet50,server` + - Workflow: + * `_resnet50,server,nsp.14` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=4 -ols=4` + - *CM_QAIC_MODEL_COMPILER_ARGS*: `-sdp-cluster-sizes=2,2 -mos=1,2 -multicast-weights` + - Workflow: + * `_resnet50,server,nsp.16` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=4 -ols=4` + - *CM_QAIC_MODEL_COMPILER_ARGS*: `-sdp-cluster-sizes=4,4 -mos=1,4` + - Workflow: + * `_resnet50,singlestream` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS*: `-aic-num-of-instances=1` + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=8 -mos=1 -ols=1` + - Workflow: + * `_resnet50,singlestream,nsp.14` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=8 -mos=1 -ols=1` + - Workflow: + * `_resnet50,tf` + - Environment variables: + - *CM_QAIC_MODEL_TO_CONVERT*: `calibrate_resnet50_tf` + - Workflow: + * `_retinanet` + - Environment variables: + - *CM_COMPILE_RETINANET*: `on` + - *CM_QAIC_MODEL_TO_CONVERT*: `calibrate_retinanet_no_nms_mlperf` + - *CM_QAIC_MODEL_COMPILER_ARGS*: `-aic-enable-depth-first` + - *CM_QAIC_MODEL_COMPILER_PARAMS_BASE*: `-aic-hw -aic-hw-version=2.0 -compile-only -enable-channelwise -onnx-define-symbol=batch_size,1 -node-precision-info=<<>> -quantization-schema-constants=symmetric_with_uint8 -quantization-schema-activations=asymmetric -quantization-calibration=None` + - Workflow: + * `_retinanet,multistream` + - Workflow: + * `_retinanet,nsp.14` + - Workflow: + * `_retinanet,offline` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=1 -mos=1 -ols=1` + - Workflow: + * `_retinanet,offline,nsp.14` + - Workflow: + * `_retinanet,server` + - 
Workflow: + * `_retinanet,server,nsp.14` + - Workflow: + * `_retinanet,singlestream` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS*: `` + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=8 -mos=1 -ols=1` + - Workflow: + * `_retinanet,singlestream,nsp.14` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=8 -mos=1 -ols=1` + - Workflow: + +
+ + + * Group "**batch-size**" +
+ Click here to expand this section. + + * `_bs.#` + - Environment variables: + - *CM_QAIC_MODEL_BATCH_SIZE*: `#` + - Workflow: + * `_bs.1` + - Environment variables: + - *CM_QAIC_MODEL_BATCH_SIZE*: `1` + - Workflow: + +
+ + + * Group "**calib-dataset-filter-size**" +
+ Click here to expand this section. + + * `_filter-size.#` + - Workflow: + +
+ + + * Group "**mlperf-scenario**" +
+ Click here to expand this section. + + * `_multistream` + - Workflow: + * `_offline` + - Workflow: + * `_server` + - Workflow: + * **`_singlestream`** (default) + - Workflow: + +
+ + + * Group "**model-framework**" +
+ Click here to expand this section. + + * `_tf` + - Workflow: + +
+ + + * Group "**nsp**" +
+ Click here to expand this section. + + * `_nsp.14` + - Workflow: + * `_nsp.16` + - Workflow: + * `_nsp.8` + - Workflow: + * `_nsp.9` + - Workflow: + +
+ + + * Group "**percentile-calibration**" +
+ Click here to expand this section. + + * `_pc.#` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_PERCENTILE_CALIBRATION_VALUE*: `#` + - *CM_QAIC_MODEL_COMPILER_QUANTIZATION_PARAMS*: `-quantization-calibration=Percentile -percentile-calibration-value=<<>>` + - Workflow: + +
+ + + * Group "**quantization**" +
+ Click here to expand this section. + + * `_no-quantized` + - Environment variables: + - *CM_QAIC_MODEL_QUANTIZATION*: `no` + - Workflow: + * **`_quantized`** (default) + - Environment variables: + - *CM_QAIC_MODEL_QUANTIZATION*: `yes` + - Workflow: + +
+ + +#### Default variations + +`_quantized,_singlestream` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--register=value` → `CM_REGISTER_CACHE=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "register":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-model-for.qaic/_cm.json)*** + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,qaic,apps,sdk + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_REGISTER_CACHE': ['on']}` + * CM names: `--adr.['qaic-apps-sdk']...` + - CM script: [get-qaic-apps-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-qaic-apps-sdk) + * qaic,calibrate,_retinanet + * Enable this dependency only if all ENV vars are set:
+`{'CM_COMPILE_RETINANET': ['yes']}` + * CM names: `--adr.['retinanet-profile', 'qaic-profile']...` + - CM script: [calibrate-model-for.qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/calibrate-model-for.qaic) + * qaic,calibrate,_resnet50 + * Enable this dependency only if all ENV vars are set:
+`{'CM_COMPILE_RESNET': ['on']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_REGISTER_CACHE': ['on']}` + * CM names: `--adr.['resnet-profile', 'qaic-profile']...` + - CM script: [calibrate-model-for.qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/calibrate-model-for.qaic) + * get,ml-model + * CM names: `--adr.['model-src']...` + - CM script: [get-ml-model-abtf-ssd-pytorch](https://github.com/mlcommons/cm4abtf/tree/master/script/get-ml-model-abtf-ssd-pytorch) + - CM script: [get-ml-model-3d-unet-kits19](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-3d-unet-kits19) + - CM script: [get-ml-model-bert-base-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-base-squad) + - CM script: [get-ml-model-bert-large-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-large-squad) + - CM script: [get-ml-model-dlrm-terabyte](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-dlrm-terabyte) + - CM script: [get-ml-model-efficientnet-lite](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-efficientnet-lite) + - CM script: [get-ml-model-gptj](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-gptj) + - CM script: [get-ml-model-huggingface-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-huggingface-zoo) + - CM script: [get-ml-model-llama2](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-llama2) + - CM script: [get-ml-model-mobilenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-mobilenet) + - CM script: [get-ml-model-neuralmagic-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-neuralmagic-zoo) + - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) + - CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) + - CM script: 
[get-ml-model-retinanet-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet-nvidia) + - CM script: [get-ml-model-rnnt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-rnnt) + - CM script: [get-ml-model-stable-diffusion](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-stable-diffusion) + - CM script: [get-ml-model-tiny-resnet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-tiny-resnet) + - CM script: [get-ml-model-using-imagenet-from-model-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-using-imagenet-from-model-zoo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-model-for.qaic/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-model-for.qaic/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-model-for.qaic/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-model-for.qaic/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-model-for.qaic/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-model-for.qaic/_cm.json) + +___ +### Script output +`cmr "qaic compile model model-compile qaic-compile [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_ML_MODEL_FILE_WITH_PATH` +* `CM_QAIC_MODEL*` +#### New environment keys auto-detected from customize + +* `CM_ML_MODEL_FILE_WITH_PATH` +* `CM_QAIC_MODEL_COMPILED_BINARY_WITH_PATH` +* `CM_QAIC_MODEL_FINAL_COMPILATION_CMD` \ No newline at end of file diff --git a/docs/AI-ML-optimization/prune-bert-models.md b/docs/AI-ML-optimization/prune-bert-models.md new file mode 100644 index 0000000000..b491bf9cfa --- /dev/null +++ b/docs/AI-ML-optimization/prune-bert-models.md @@ -0,0 +1,185 @@ +Automatically generated README for this automation recipe: **prune-bert-models** + +Category: **AI/ML optimization** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=prune-bert-models,76182d4896414216) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-bert-models)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *prune,bert-models,bert-prune,prune-bert-models* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "prune bert-models bert-prune prune-bert-models" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=prune,bert-models,bert-prune,prune-bert-models` + +`cm run script --tags=prune,bert-models,bert-prune,prune-bert-models[,variations] [--input_flags]` + +*or* + +`cmr "prune bert-models bert-prune prune-bert-models"` + +`cmr "prune bert-models bert-prune prune-bert-models [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'prune,bert-models,bert-prune,prune-bert-models', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="prune,bert-models,bert-prune,prune-bert-models"``` + +#### Run this script via Docker (beta) + +`cm docker script "prune bert-models bert-prune prune-bert-models[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_model.#` + - Environment variables: + - *CM_BERT_PRUNE_MODEL_NAME*: `#` + - *CM_MODEL_ZOO_STUB*: `#` + - Workflow: + * `_path.#` + - Environment variables: + - *CM_BERT_PRUNE_CKPT_PATH*: `#` + - Workflow: + * `_task.#` + - Environment variables: + - *CM_BERT_PRUNE_TASK*: `#` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--constraint=value` → `CM_BERT_PRUNE_CONSTRAINT=value` +* `--output_dir=value` → `CM_BERT_PRUNE_OUTPUT_DIR=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "constraint":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_BERT_PRUNE_TASK: `squad` +* CM_BERT_PRUNE_MODEL_NAME: `bert-large-uncased` +* CM_MODEL_ZOO_STUB: `bert-large-uncased` +* CM_BERT_PRUNE_CONSTRAINT: `0.5` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-bert-models/_cm.json)*** + * get,python3 + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_numpy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_scipy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_cupy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_tqdm + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torch_cuda + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_datasets + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_transformers + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_scikit-learn + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,git,repo,_repo.https://github.com/cknowledge/retraining-free-pruning + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + * get,ml-model,model,zoo,model-zoo,huggingface,_prune + * CM names: `--adr.['get-model']...` + - CM script: [get-ml-model-huggingface-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-huggingface-zoo) + 1. 
***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-bert-models/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-bert-models/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-bert-models/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-bert-models/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-bert-models/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-bert-models/_cm.json) + +___ +### Script output +`cmr "prune bert-models bert-prune prune-bert-models [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/CM-interface-prototyping/test-mlperf-inference-retinanet.md b/docs/CM-interface-prototyping/test-mlperf-inference-retinanet.md new file mode 100644 index 0000000000..76c94e570e --- /dev/null +++ b/docs/CM-interface-prototyping/test-mlperf-inference-retinanet.md @@ -0,0 +1,135 @@ +Automatically generated README for this automation recipe: **test-mlperf-inference-retinanet** + +Category: **CM interface prototyping** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=test-mlperf-inference-retinanet,1cedbc3b642a403a) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-mlperf-inference-retinanet)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *test,mlperf-inference-win,retinanet,windows* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "test mlperf-inference-win retinanet windows" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=test,mlperf-inference-win,retinanet,windows` + +`cm run script --tags=test,mlperf-inference-win,retinanet,windows ` + +*or* + +`cmr "test mlperf-inference-win retinanet windows"` + +`cmr "test mlperf-inference-win retinanet windows " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'test,mlperf-inference-win,retinanet,windows', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="test,mlperf-inference-win,retinanet,windows"``` + +#### Run this script via Docker (beta) + +`cm docker script "test mlperf-inference-win retinanet windows" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-mlperf-inference-retinanet/_cm.json)*** + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_requests + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,loadgen + * CM names: `--adr.['loadgen', 'mlperf-inference-loadgen']...` + - CM script: [get-mlperf-inference-loadgen](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-loadgen) + * mlperf,inference,source + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,dataset,open-images,original + - CM script: [get-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages) + * get,raw,ml-model,retinanet + - CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-mlperf-inference-retinanet/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-mlperf-inference-retinanet/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-mlperf-inference-retinanet/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-mlperf-inference-retinanet/run.sh) + 1. 
Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-mlperf-inference-retinanet/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-mlperf-inference-retinanet/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-mlperf-inference-retinanet/_cm.json) + +___ +### Script output +`cmr "test mlperf-inference-win retinanet windows " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/CUDA-automation/get-cuda-devices.md b/docs/CUDA-automation/get-cuda-devices.md new file mode 100644 index 0000000000..931e10be88 --- /dev/null +++ b/docs/CUDA-automation/get-cuda-devices.md @@ -0,0 +1,122 @@ +Automatically generated README for this automation recipe: **get-cuda-devices** + +Category: **CUDA automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-cuda-devices,7a3ede4d3558427a) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda-devices)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,cuda-devices* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get cuda-devices" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,cuda-devices` + +`cm run script --tags=get,cuda-devices ` + +*or* + +`cmr "get cuda-devices"` + +`cmr "get cuda-devices " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,cuda-devices', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,cuda-devices"``` + +#### Run this script via Docker (beta) + +`cm docker script "get cuda-devices" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda-devices/_cm.json)*** + * get,cuda,_toolkit + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda-devices/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda-devices/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda-devices/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda-devices/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda-devices/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda-devices/_cm.json) + +___ +### Script output +`cmr "get cuda-devices " -j` +#### New environment keys (filter) + +* `CM_CUDA_DEVICE_*` +#### New environment keys auto-detected from customize diff --git a/docs/CUDA-automation/get-cuda.md b/docs/CUDA-automation/get-cuda.md new file mode 100644 index 0000000000..d0b49f561c --- /dev/null +++ b/docs/CUDA-automation/get-cuda.md @@ -0,0 +1,230 @@ +Automatically generated README for this automation recipe: **get-cuda** + +Category: **CUDA automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-cuda,46d133d9ef92422d) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- + +# System dependencies + +* Download [CUDA toolkit](https://developer.nvidia.com/cuda-toolkit). +* Download [cuDNN](https://developer.nvidia.com/rdp/cudnn-download). +* Download [TensorRT](https://developer.nvidia.com/nvidia-tensorrt-8x-download). + + + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,cuda,cuda-compiler,cuda-lib,toolkit,lib,nvcc,get-nvcc,get-cuda* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get cuda cuda-compiler cuda-lib toolkit lib nvcc get-nvcc get-cuda" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,cuda,cuda-compiler,cuda-lib,toolkit,lib,nvcc,get-nvcc,get-cuda` + +`cm run script --tags=get,cuda,cuda-compiler,cuda-lib,toolkit,lib,nvcc,get-nvcc,get-cuda[,variations] [--input_flags]` + +*or* + +`cmr "get cuda cuda-compiler cuda-lib toolkit lib nvcc get-nvcc get-cuda"` + +`cmr "get cuda cuda-compiler cuda-lib toolkit lib nvcc get-nvcc get-cuda [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,cuda,cuda-compiler,cuda-lib,toolkit,lib,nvcc,get-nvcc,get-cuda', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,cuda,cuda-compiler,cuda-lib,toolkit,lib,nvcc,get-nvcc,get-cuda"``` + +#### Run this script via Docker (beta) + +`cm docker script "get cuda cuda-compiler cuda-lib toolkit lib nvcc get-nvcc get-cuda[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_cudnn` + - Environment variables: + - *CM_CUDA_NEEDS_CUDNN*: `yes` + - Workflow: + 1. ***Read "post_deps" on other CM scripts*** + * get,nvidia,cudnn + * CM names: `--adr.['cudnn']...` + - CM script: [get-cudnn](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cudnn) + * `_package-manager` + - Environment variables: + - *CM_CUDA_PACKAGE_MANAGER_INSTALL*: `yes` + - Workflow: + +
+ + + * Group "**installation-mode**" +
+ Click here to expand this section. + + * `_lib-only` + - Environment variables: + - *CM_CUDA_FULL_TOOLKIT_INSTALL*: `no` + - *CM_TMP_FILE_TO_CHECK_UNIX*: `libcudart.so` + - *CM_TMP_FILE_TO_CHECK_WINDOWS*: `libcudart.dll` + - Workflow: + * **`_toolkit`** (default) + - Environment variables: + - *CM_CUDA_FULL_TOOLKIT_INSTALL*: `yes` + - *CM_TMP_FILE_TO_CHECK_UNIX*: `nvcc` + - *CM_TMP_FILE_TO_CHECK_WINDOWS*: `nvcc.exe` + - Workflow: + +
+ + +#### Default variations + +`_toolkit` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--cudnn_tar_file=value` → `CM_CUDNN_TAR_FILE_PATH=value` +* `--cudnn_tar_path=value` → `CM_CUDNN_TAR_FILE_PATH=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "cudnn_tar_file":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_CUDA_PATH_LIB_CUDNN_EXISTS: `no` +* CM_REQUIRE_INSTALL: `no` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,cl + * Enable this dependency only if all ENV vars are set:
+`{'CM_CUDA_FULL_TOOLKIT_INSTALL': ['yes'], 'CM_HOST_OS_TYPE': ['windows']}` + * CM names: `--adr.['compiler']...` + - CM script: [get-cl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cl) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda/_cm.json)*** + * install,cuda,prebuilt + * Enable this dependency only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + * CM names: `--adr.['install-cuda-prebuilt']...` + - CM script: [install-cuda-prebuilt](https://github.com/mlcommons/cm4mlops/tree/master/script/install-cuda-prebuilt) + * get,generic-sys-util,_nvidia-cuda-toolkit + * Enable this dependency only if all ENV vars are set:
+`{'CM_CUDA_PACKAGE_MANAGER_INSTALL': ['yes']}` + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda/_cm.json) + +___ +### Script output +`cmr "get cuda cuda-compiler cuda-lib toolkit lib nvcc get-nvcc get-cuda [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `+ LDFLAGS` +* `+CPLUS_INCLUDE_PATH` +* `+C_INCLUDE_PATH` +* `+DYLD_FALLBACK_LIBRARY_PATH` +* `+LD_LIBRARY_PATH` +* `+PATH` +* `CM_CUDA_*` +* `CM_NVCC_*` +* `CUDA_HOME` +* `CUDA_PATH` +#### New environment keys auto-detected from customize + +* `CM_CUDA_CACHE_TAGS` +* `CM_CUDA_FULL_TOOLKIT_INSTALL` +* `CM_CUDA_INSTALLED_PATH` +* `CM_CUDA_PATH_BIN` +* `CM_CUDA_PATH_INCLUDE` +* `CM_CUDA_PATH_LIB` +* `CM_CUDA_VERSION` +* `CM_CUDA_VERSION_STRING` +* `CM_NVCC_BIN` \ No newline at end of file diff --git a/docs/CUDA-automation/get-cudnn.md b/docs/CUDA-automation/get-cudnn.md new file mode 100644 index 0000000000..224fb26410 --- /dev/null +++ b/docs/CUDA-automation/get-cudnn.md @@ -0,0 +1,167 @@ +Automatically generated README for this automation recipe: **get-cudnn** + +Category: **CUDA automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=get-cudnn,d73ee19baee14df8) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cudnn)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,cudnn,nvidia* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get cudnn nvidia" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,cudnn,nvidia` + +`cm run script --tags=get,cudnn,nvidia [--input_flags]` + +*or* + +`cmr "get cudnn nvidia"` + +`cmr "get cudnn nvidia " [--input_flags]` + + + +#### Input Flags + +* --**input**=Full path to the installed cuDNN library +* --**tar_file**=Full path to the cuDNN Tar file downloaded from Nvidia website (https://developer.nvidia.com/cudnn) + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "input":...} +``` +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,cudnn,nvidia', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,cudnn,nvidia"``` + +#### Run this script via Docker (beta) + +`cm docker script "get cudnn nvidia" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--input=value` → `CM_INPUT=value` +* `--tar_file=value` → `CM_CUDNN_TAR_FILE_PATH=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "input":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_SUDO: `sudo` +* CM_INPUT: `` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cudnn/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,cuda + * Skip this dependency only if all ENV vars are set:
+`{'CM_CUDA_PATH_LIB': ['on'], 'CM_CUDA_PATH_INCLUDE': ['on']}` + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cudnn/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cudnn/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cudnn/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cudnn/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cudnn/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cudnn/_cm.json) + +___ +### Script output +`cmr "get cudnn nvidia " [--input_flags] -j` +#### New environment keys (filter) + +* `+CPLUS_INCLUDE_PATH` +* `+C_INCLUDE_PATH` +* `+DYLD_FALLBACK_LIBRARY_PATH` +* `+LD_LIBRARY_PATH` +* `+PATH` +* `CM_CUDA_PATH_INCLUDE_CUDNN` +* `CM_CUDA_PATH_LIB_CUDNN` +* `CM_CUDA_PATH_LIB_CUDNN_EXISTS` +* `CM_CUDNN_*` +#### New environment keys auto-detected from customize + +* `CM_CUDA_PATH_INCLUDE_CUDNN` +* `CM_CUDA_PATH_LIB_CUDNN` +* `CM_CUDA_PATH_LIB_CUDNN_EXISTS` +* `CM_CUDNN_VERSION` \ No newline at end of file diff --git a/docs/CUDA-automation/get-tensorrt.md b/docs/CUDA-automation/get-tensorrt.md new file mode 100644 index 0000000000..07153e1538 --- /dev/null +++ b/docs/CUDA-automation/get-tensorrt.md @@ -0,0 +1,176 @@ +Automatically generated README for this automation recipe: **get-tensorrt** + +Category: **CUDA automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and 
Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-tensorrt,2a84ca505e4c408d) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tensorrt)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,tensorrt,nvidia* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get tensorrt nvidia" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,tensorrt,nvidia` + +`cm run script --tags=get,tensorrt,nvidia[,variations] [--input_flags]` + +*or* + +`cmr "get tensorrt nvidia"` + +`cmr "get tensorrt nvidia [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + + +#### Input Flags + +* --**input**=Full path to the installed TensorRT library (nvinfer) +* --**tar_file**=Full path to the TensorRT Tar file downloaded from the Nvidia website 
(https://developer.nvidia.com/tensorrt) + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "input":...} +``` +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,tensorrt,nvidia', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,tensorrt,nvidia"``` + +#### Run this script via Docker (beta) + +`cm docker script "get tensorrt nvidia[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_dev` + - Environment variables: + - *CM_TENSORRT_REQUIRE_DEV*: `yes` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--input=value` → `CM_INPUT=value` +* `--tar_file=value` → `CM_TENSORRT_TAR_FILE_PATH=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "input":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tensorrt/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tensorrt/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tensorrt/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tensorrt/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tensorrt/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tensorrt/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tensorrt/_cm.json) + +___ +### Script output +`cmr "get tensorrt nvidia [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `+ LDFLAGS` +* `+CPLUS_INCLUDE_PATH` +* `+C_INCLUDE_PATH` +* `+DYLD_FALLBACK_LIBRARY_PATH` +* `+LD_LIBRARY_PATH` +* `+PATH` +* `CM_TENSORRT_*` +#### New environment keys auto-detected from customize + +* `CM_TENSORRT_INSTALL_PATH` +* `CM_TENSORRT_LIB_PATH` +* `CM_TENSORRT_VERSION` \ No newline at end of file diff --git a/docs/CUDA-automation/install-cuda-package-manager.md b/docs/CUDA-automation/install-cuda-package-manager.md new file mode 100644 index 0000000000..e08286c942 --- /dev/null +++ b/docs/CUDA-automation/install-cuda-package-manager.md @@ -0,0 +1,124 @@ +Automatically generated README for this automation recipe: **install-cuda-package-manager** + +Category: **CUDA automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-cuda-package-manager,c1afdff8542f45be) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-package-manager)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,package-manager,cuda,package-manager-cuda,install-pm-cuda* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install package-manager cuda package-manager-cuda install-pm-cuda" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,package-manager,cuda,package-manager-cuda,install-pm-cuda` + +`cm run script --tags=install,package-manager,cuda,package-manager-cuda,install-pm-cuda ` + +*or* + +`cmr "install package-manager cuda package-manager-cuda install-pm-cuda"` + +`cmr "install package-manager cuda package-manager-cuda install-pm-cuda " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'install,package-manager,cuda,package-manager-cuda,install-pm-cuda', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,package-manager,cuda,package-manager-cuda,install-pm-cuda"``` + +#### Run this script via Docker (beta) + +`cm docker script "install package-manager cuda package-manager-cuda install-pm-cuda" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-package-manager/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-package-manager/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-package-manager/_cm.json) + 1. ***Run native script if exists*** + * [run-ubuntu.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-package-manager/run-ubuntu.sh) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-package-manager/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-package-manager/_cm.json) + 1. Run "postprocess" function from customize.py + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-package-manager/_cm.json)*** + * get,cuda + * Skip this dependency only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + +___ +### Script output +`cmr "install package-manager cuda package-manager-cuda install-pm-cuda " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/CUDA-automation/install-cuda-prebuilt.md b/docs/CUDA-automation/install-cuda-prebuilt.md new file mode 100644 index 0000000000..16a3fe50bd --- /dev/null +++ b/docs/CUDA-automation/install-cuda-prebuilt.md @@ -0,0 +1,180 @@ +Automatically generated README for this automation recipe: **install-cuda-prebuilt** + +Category: **CUDA automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-cuda-prebuilt,14eadcd42ba340c3) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-prebuilt)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,prebuilt,cuda,prebuilt-cuda,install-prebuilt-cuda* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install prebuilt cuda prebuilt-cuda install-prebuilt-cuda" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,prebuilt,cuda,prebuilt-cuda,install-prebuilt-cuda` + +`cm run script --tags=install,prebuilt,cuda,prebuilt-cuda,install-prebuilt-cuda[,variations] [--input_flags]` + +*or* + +`cmr "install prebuilt cuda prebuilt-cuda install-prebuilt-cuda"` + +`cmr "install prebuilt cuda prebuilt-cuda install-prebuilt-cuda [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'install,prebuilt,cuda,prebuilt-cuda,install-prebuilt-cuda', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,prebuilt,cuda,prebuilt-cuda,install-prebuilt-cuda"``` + +#### Run this script via Docker (beta) + +`cm docker script "install prebuilt cuda prebuilt-cuda install-prebuilt-cuda[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * Group "**install-driver**" +
+ Click here to expand this section. + + * `_driver` + - Environment variables: + - *CM_CUDA_INSTALL_DRIVER*: `yes` + - Workflow: + * **`_no-driver`** (default) + - Environment variables: + - *CM_CUDA_INSTALL_DRIVER*: `no` + - Workflow: + +
+ + +#### Default variations + +`_no-driver` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--local_run_file_path=value` → `CUDA_RUN_FILE_LOCAL_PATH=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "local_run_file_path":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_SUDO: `sudo` + +
+ +#### Versions +Default version: `11.8.0` + +* `11.7.0` +* `11.8.0` +* `12.0.0` +* `12.1.1` +* `12.2.0` +* `12.3.2` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-prebuilt/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-prebuilt/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-prebuilt/_cm.json)*** + * download,file + * CM names: `--adr.['download-script']...` + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-prebuilt/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-prebuilt/_cm.json) + 1. Run "postprocess" function from customize.py + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-prebuilt/_cm.json)*** + * get,cuda + * Skip this dependency only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + +___ +### Script output +`cmr "install prebuilt cuda prebuilt-cuda install-prebuilt-cuda [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_CUDA_*` +* `CM_NVCC_*` +#### New environment keys auto-detected from customize + +* `CM_CUDA_INSTALLED_PATH` +* `CM_NVCC_BIN_WITH_PATH` \ No newline at end of file diff --git a/docs/Cloud-automation/destroy-terraform.md b/docs/Cloud-automation/destroy-terraform.md new file mode 100644 index 0000000000..514106a78b --- /dev/null +++ b/docs/Cloud-automation/destroy-terraform.md @@ -0,0 +1,121 @@ +Automatically generated README for this automation recipe: **destroy-terraform** + +Category: **Cloud automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=destroy-terraform,3463458d03054856) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/destroy-terraform)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *destroy,terraform,cmd* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "destroy terraform cmd" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=destroy,terraform,cmd` + +`cm run script --tags=destroy,terraform,cmd ` + +*or* + +`cmr "destroy terraform cmd"` + +`cmr "destroy terraform cmd " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'destroy,terraform,cmd' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="destroy,terraform,cmd"``` + +#### Run this script via Docker (beta) + +`cm docker script "destroy terraform cmd" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+
+___
+### Dependencies on other CM scripts
+
+
+  1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/destroy-terraform/_cm.json)***
+     * get,terraform
+       * CM names: `--adr.['terraform']...`
+       - CM script: [get-terraform](https://github.com/mlcommons/cm4mlops/tree/master/script/get-terraform)
+  1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/destroy-terraform/customize.py)***
+  1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/destroy-terraform/_cm.json)
+  1. ***Run native script if exists***
+     * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/destroy-terraform/run.bat)
+     * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/destroy-terraform/run.sh)
+  1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/destroy-terraform/_cm.json)
+  1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/destroy-terraform/customize.py)***
+  1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/destroy-terraform/_cm.json) + +___ +### Script output +`cmr "destroy terraform cmd " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Cloud-automation/get-aws-cli.md b/docs/Cloud-automation/get-aws-cli.md new file mode 100644 index 0000000000..52bc80bfa1 --- /dev/null +++ b/docs/Cloud-automation/get-aws-cli.md @@ -0,0 +1,125 @@ +Automatically generated README for this automation recipe: **get-aws-cli** + +Category: **Cloud automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-aws-cli,dad67944229942a3) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aws-cli)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,aws-cli,aws,cli* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get aws-cli aws cli" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,aws-cli,aws,cli` + +`cm run script --tags=get,aws-cli,aws,cli ` + +*or* + +`cmr "get aws-cli aws cli"` + +`cmr "get aws-cli aws cli " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'get,aws-cli,aws,cli' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,aws-cli,aws,cli"``` + +#### Run this script via Docker (beta) + +`cm docker script "get aws-cli aws cli" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aws-cli/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aws-cli/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aws-cli/_cm.json)*** + * install,aws-cli + * Enable this dependency only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}`
+       - CM script: [install-aws-cli](https://github.com/mlcommons/cm4mlops/tree/master/script/install-aws-cli)
+  1. ***Run native script if exists***
+     * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aws-cli/run.sh)
+  1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aws-cli/_cm.json)
+  1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aws-cli/customize.py)***
+  1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aws-cli/_cm.json)
+
+___
+### Script output
+`cmr "get aws-cli aws cli " -j`
+#### New environment keys (filter)
+
+* `CM_AWS_*`
+#### New environment keys auto-detected from customize
+
+* `CM_AWS_CACHE_TAGS`
+* `CM_AWS_INSTALLED_PATH`
\ No newline at end of file
diff --git a/docs/Cloud-automation/get-terraform.md b/docs/Cloud-automation/get-terraform.md
new file mode 100644
index 0000000000..22b001ca80
--- /dev/null
+++ b/docs/Cloud-automation/get-terraform.md
@@ -0,0 +1,126 @@
+Automatically generated README for this automation recipe: **get-terraform**
+
+Category: **Cloud automation**
+
+License: **Apache 2.0**
+
+Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md)
+
+---
+*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-terraform,66b33c38a4d7461e) ] [ [Notes from the authors, contributors and users](README-extra.md) ]*
+
+---
+#### Summary
+
+* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)*
+* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-terraform)*
+* CM meta description for this script: *[_cm.json](_cm.json)*
+* All CM tags to find and reuse this script (see in above 
meta description): *get,terraform,get-terraform* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get terraform get-terraform" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,terraform,get-terraform` + +`cm run script --tags=get,terraform,get-terraform ` + +*or* + +`cmr "get terraform get-terraform"` + +`cmr "get terraform get-terraform " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'get,terraform,get-terraform' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,terraform,get-terraform"``` + +#### Run this script via Docker (beta) + +`cm docker script "get terraform get-terraform" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-terraform/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-terraform/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-terraform/_cm.json)*** + * install,terraform + * Enable this dependency only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}`
+       - CM script: [install-terraform-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-terraform-from-src)
+  1. ***Run native script if exists***
+     * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-terraform/run.sh)
+  1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-terraform/_cm.json)
+  1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-terraform/customize.py)***
+  1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-terraform/_cm.json)
+
+___
+### Script output
+`cmr "get terraform get-terraform " -j`
+#### New environment keys (filter)
+
+* `+PATH`
+* `CM_TERRAFORM_*`
+#### New environment keys auto-detected from customize
+
+* `CM_TERRAFORM_CACHE_TAGS`
+* `CM_TERRAFORM_INSTALLED_PATH`
\ No newline at end of file
diff --git a/docs/Cloud-automation/install-aws-cli.md b/docs/Cloud-automation/install-aws-cli.md
new file mode 100644
index 0000000000..d142c7c071
--- /dev/null
+++ b/docs/Cloud-automation/install-aws-cli.md
@@ -0,0 +1,123 @@
+Automatically generated README for this automation recipe: **install-aws-cli**
+
+Category: **Cloud automation**
+
+License: **Apache 2.0**
+
+Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md)
+
+---
+*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-aws-cli,4d3efd333c3f4d36) ]*
+
+---
+#### Summary
+
+* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)*
+* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-aws-cli)*
+* CM meta description for this script: *[_cm.json](_cm.json)*
+* All CM tags to find and reuse this script (see 
in above meta description): *install,script,aws-cli,aws,cli* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install script aws-cli aws cli" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,script,aws-cli,aws,cli` + +`cm run script --tags=install,script,aws-cli,aws,cli ` + +*or* + +`cmr "install script aws-cli aws cli"` + +`cmr "install script aws-cli aws cli " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'install,script,aws-cli,aws,cli' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,script,aws-cli,aws,cli"``` + +#### Run this script via Docker (beta) + +`cm docker script "install script aws-cli aws cli" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+
+___
+### Dependencies on other CM scripts
+
+
+  1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-aws-cli/_cm.json)***
+     * detect,os
+       - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os)
+  1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-aws-cli/customize.py)***
+  1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-aws-cli/_cm.json)
+  1. ***Run native script if exists***
+     * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-aws-cli/run.sh)
+  1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-aws-cli/_cm.json)
+  1. Run "postprocess" function from customize.py
+  1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-aws-cli/_cm.json)***
+     * get,aws-cli
+       * Skip this dependency only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + - CM script: [get-aws-cli](https://github.com/mlcommons/cm4mlops/tree/master/script/get-aws-cli) + +___ +### Script output +`cmr "install script aws-cli aws cli " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Cloud-automation/install-terraform-from-src.md b/docs/Cloud-automation/install-terraform-from-src.md new file mode 100644 index 0000000000..0d7c835311 --- /dev/null +++ b/docs/Cloud-automation/install-terraform-from-src.md @@ -0,0 +1,130 @@ +Automatically generated README for this automation recipe: **install-terraform-from-src** + +Category: **Cloud automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-terraform-from-src,d79d47a074f34428) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-terraform-from-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,terraform,from-src* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install terraform from-src" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,terraform,from-src` + +`cm run script --tags=install,terraform,from-src ` + +*or* + +`cmr "install terraform from-src"` + +`cmr "install terraform from-src " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'install,terraform,from-src' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,terraform,from-src"``` + +#### Run this script via Docker (beta) + +`cm docker script "install terraform from-src" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+
+#### Versions
+Default version: `main`
+
+* `main`
+___
+### Dependencies on other CM scripts
+
+
+  1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-terraform-from-src/_cm.json)***
+     * detect,cpu
+       - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu)
+     * get,tool,go
+       - CM script: [get-go](https://github.com/mlcommons/cm4mlops/tree/master/script/get-go)
+  1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-terraform-from-src/customize.py)***
+  1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-terraform-from-src/_cm.json)
+  1. ***Run native script if exists***
+     * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-terraform-from-src/run.sh)
+  1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-terraform-from-src/_cm.json)
+  1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-terraform-from-src/customize.py)***
+  1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-terraform-from-src/_cm.json) + +___ +### Script output +`cmr "install terraform from-src " -j` +#### New environment keys (filter) + +* `+PATH` +* `CM_TERRAFORM_*` +#### New environment keys auto-detected from customize + +* `CM_TERRAFORM_BIN_WITH_PATH` +* `CM_TERRAFORM_INSTALLED_PATH` \ No newline at end of file diff --git a/docs/Cloud-automation/run-terraform.md b/docs/Cloud-automation/run-terraform.md new file mode 100644 index 0000000000..7e5699478e --- /dev/null +++ b/docs/Cloud-automation/run-terraform.md @@ -0,0 +1,481 @@ +Automatically generated README for this automation recipe: **run-terraform** + +Category: **Cloud automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=run-terraform,ec344bd44af144d7) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- + +## Setup for Google Cloud Instances +``` +sudo snap install google-cloud-cli --classic +gcloud auth application-default login +``` + +The above two commands will install google-cloud-cli and authorizes the user to access it. Once done, you can start creating gcp instance using CM commands like below. To destroy an instance just repeat the same command with `--destroy` option. 
+
+```
+cm run script --tags=run,terraform,_gcp,_gcp_project.mlperf-inference-tests --cminit
+```
+Here, `mlperf-inference-tests` is the name of the google project as created in [Google cloud console](https://console.cloud.google.com/apis/dashboard)
+
+
+---
+#### Summary
+
+* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)*
+* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-terraform)*
+* CM meta description for this script: *[_cm.json](_cm.json)*
+* All CM tags to find and reuse this script (see in above meta description): *run,terraform*
+* Output cached? *True*
+* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts
+
+
+---
+### Reuse this script in your project
+
+#### Install MLCommons CM automation meta-framework
+
+* [Install CM](https://access.cknowledge.org/playground/?action=install)
+* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md)
+
+#### Pull CM repository with this automation recipe (CM script)
+
+```cm pull repo mlcommons@cm4mlops```
+
+#### Print CM help from the command line
+
+````cmr "run terraform" --help````
+
+#### Customize and run this script from the command line with different variations and flags
+
+`cm run script --tags=run,terraform`
+
+`cm run script --tags=run,terraform[,variations] [--input_flags]`
+
+*or*
+
+`cmr "run terraform"`
+
+`cmr "run terraform [variations]" [--input_flags]`
+
+
+* *See the list of `variations` [here](#variations) and check the [Getting Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.*
+
+#### Run this script from Python
+
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'run,terraform' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="run,terraform"``` + +#### Run this script via Docker (beta) + +`cm docker script "run terraform[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_amazon-linux-2-kernel.#` + - Environment variables: + - *TF_VAR_INSTANCE_IMAGE_OS*: `amazon-linux-2-kernel.#` + - Workflow: + * `_graviton` + - Environment variables: + - *CM_TERRAFORM_AWS_GRAVITON_INSTANCE*: `yes` + - Workflow: + * `_inferentia` + - Environment variables: + - *CM_TERRAFORM_AWS_INFERENTIA_INSTANCE*: `yes` + - Workflow: + * `_inferentia,amazon-linux-2-kernel.510` + - Workflow: + * `_rhel.#` + - Environment variables: + - *TF_VAR_INSTANCE_IMAGE_OS*: `rhel.#` + - Workflow: + * `_ubuntu.#` + - Environment variables: + - *TF_VAR_INSTANCE_IMAGE_OS*: `ubuntu.#` + - Workflow: + +
+ + + * Group "**aws-instance-image**" +
+ Click here to expand this section. + + * `_amazon-linux-2-kernel.510,arm64,us-west-2` + - Environment variables: + - *TF_VAR_INSTANCE_IMAGE*: `ami-0f1a5f5ada0e7da53` + - Workflow: + * `_aws_instance_image.#` + - Environment variables: + - *TF_VAR_INSTANCE_IMAGE*: `#` + - Workflow: + * `_aws_instance_image.ami-0735c191cf914754d` + - Environment variables: + - *TF_VAR_INSTANCE_IMAGE*: `ami-0735c191cf914754d` + - Workflow: + * `_aws_instance_image.ami-0a0d8589b597d65b3` + - Environment variables: + - *TF_VAR_INSTANCE_IMAGE*: `ami-0a0d8589b597d65b3` + - Workflow: + * `_rhel.9,x86,us-west-2` + - Environment variables: + - *TF_VAR_INSTANCE_IMAGE*: `ami-0dda7e535b65b6469` + - Workflow: + * `_ubuntu.2204,arm64,us-west-2` + - Environment variables: + - *TF_VAR_INSTANCE_IMAGE*: `ami-079f51a7bcca65b92` + - Workflow: + * `_ubuntu.2204,x86,us-west-2` + - Environment variables: + - *TF_VAR_INSTANCE_IMAGE*: `ami-0735c191cf914754d` + - Workflow: + +
+ + + * Group "**aws-instance-type**" +
+ Click here to expand this section. + + * `_a1.2xlarge` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `a1.2xlarge` + - Workflow: + * `_a1.metal` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `a1.metal` + - Workflow: + * `_a1.xlarge` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `a1.xlarge` + - Workflow: + * `_aws_instance_type.#` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `#` + - Workflow: + * `_c5.12xlarge` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `c5.12xlarge` + - Workflow: + * `_c5.4xlarge` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `c5.4xlarge` + - Workflow: + * `_c5d.9xlarge` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `c5d.9xlarge` + - Workflow: + * `_g4dn.xlarge` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `g4dn.xlarge` + - Workflow: + * `_inf1.2xlarge` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `inf1.2xlarge` + - Workflow: + * `_inf1.xlarge` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `inf1.xlarge` + - Workflow: + * `_inf2.8xlarge` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `inf2.8xlarge` + - Workflow: + * `_inf2.xlarge` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `inf2.xlarge` + - Workflow: + * `_m7g.2xlarge` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `m7g.2xlarge` + - Workflow: + * `_m7g.xlarge` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `m7g.xlarge` + - Workflow: + * `_t2.#` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `t2.#` + - Workflow: + * `_t2.2xlarge` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `t2.2xlarge` + - Workflow: + * `_t2.large` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `t2.large` + - Workflow: + * `_t2.medium` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `t2.medium` + - Workflow: + * `_t2.micro` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `t2.micro` + - Workflow: + * `_t2.nano` + - Environment variables: + - 
*TF_VAR_INSTANCE_TYPE*: `t2.nano` + - Workflow: + * `_t2.small` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `t2.small` + - Workflow: + * `_t2.xlarge` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `t2.xlarge` + - Workflow: + +
+ + + * Group "**cloud-provider**" +
+ Click here to expand this section. + + * **`_aws`** (default) + - Environment variables: + - *CM_TERRAFORM_CONFIG_DIR_NAME*: `aws` + - Workflow: + * `_gcp` + - Environment variables: + - *CM_TERRAFORM_CONFIG_DIR_NAME*: `gcp` + - Workflow: + +
+ + + * Group "**gcp-instance-image**" +
+ Click here to expand this section. + + * `_debian-cloud/debian-11` + - Environment variables: + - *TF_VAR_INSTANCE_IMAGE*: `debian-cloud/debian-11` + - Workflow: + * `_gcp_instance_image.#` + - Environment variables: + - *TF_VAR_INSTANCE_IMAGE*: `#` + - Workflow: + * `_ubuntu-2204-jammy-v20230114` + - Environment variables: + - *TF_VAR_INSTANCE_IMAGE*: `ubuntu-2204-jammy-v20230114` + - Workflow: + +
+ + + * Group "**gcp-instance-type**" +
+ Click here to expand this section. + + * `_f1-micro` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `f1-micro` + - Workflow: + * `_gcp_instance_type.#` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `#` + - Workflow: + * `_n1-highmem.#` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `n1-highmem-#` + - Workflow: + * `_n1-standard.#` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `n1-highmem-#` + - Workflow: + +
+ + + * Group "**gcp-project**" +
+ Click here to expand this section. + + * `_gcp_project.#` + - Environment variables: + - *TF_VAR_GCP_PROJECT*: `#` + - Workflow: + +
+ + + * Group "**instance-name**" +
+ Click here to expand this section. + + * `_instance_name.#` + - Environment variables: + - *TF_VAR_INSTANCE_NAME*: `#` + - Workflow: + +
+ + + * Group "**platform**" +
+ Click here to expand this section. + + * `_arm64` + - Environment variables: + - *CM_INSTANCE_PLATFORM*: `arm64` + - Workflow: + * **`_x86`** (default) + - Environment variables: + - *CM_INSTANCE_PLATFORM*: `x86` + - Workflow: + +
+ + + * Group "**region**" +
+ Click here to expand this section. + + * `_region.#` + - Environment variables: + - *TF_VAR_INSTANCE_REGION*: `#` + - Workflow: + * `_us-west-2` + - Environment variables: + - *TF_VAR_INSTANCE_REGION*: `us-west-2` + - Workflow: + +
+ + + * Group "**storage-size**" +
+ Click here to expand this section. + + * `_storage_size.#` + - Environment variables: + - *TF_VAR_DISK_GBS*: `#` + - Workflow: + * `_storage_size.8` + - Environment variables: + - *TF_VAR_DISK_GBS*: `8` + - Workflow: + +
+ + + * Group "**zone**" +
+ Click here to expand this section. + + * `_zone.#` + - Environment variables: + - *TF_VAR_INSTANCE_ZONE*: `#` + - Workflow: + +
+ + +#### Default variations + +`_aws,_x86` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--cminit=value` → `CM_TERRAFORM_CM_INIT=value` +* `--destroy=value` → `CM_DESTROY_TERRAFORM=value` +* `--gcp_credentials_json_file=value` → `CM_GCP_CREDENTIALS_JSON_PATH=value` +* `--key_file=value` → `CM_SSH_KEY_FILE=value` +* `--run_cmds=value` → `CM_TERRAFORM_RUN_COMMANDS=value` +* `--ssh_key_file=value` → `CM_SSH_KEY_FILE=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "cminit":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* TF_VAR_SECURITY_GROUP_ID: `sg-0783752c97d2e011d` +* TF_VAR_CPU_COUNT: `1` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-terraform/_cm.json)*** + * get,terraform + - CM script: [get-terraform](https://github.com/mlcommons/cm4mlops/tree/master/script/get-terraform) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-terraform/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-terraform/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-terraform/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-terraform/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-terraform/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-terraform/_cm.json)*** + * destroy,terraform + * Enable this dependency only if all ENV vars are set:
+`{'CM_DESTROY_TERRAFORM': ['on']}` + * CM names: `--adr.['destroy-cmd']...` + - CM script: [destroy-terraform](https://github.com/mlcommons/cm4mlops/tree/master/script/destroy-terraform) + +___ +### Script output +`cmr "run terraform [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_TERRAFORM_CONFIG_DIR` +* `CM_TERRAFORM_RUN_DIR` +#### New environment keys auto-detected from customize + +* `CM_TERRAFORM_CONFIG_DIR` +* `CM_TERRAFORM_RUN_DIR` \ No newline at end of file diff --git a/docs/Collective-benchmarking/launch-benchmark.md b/docs/Collective-benchmarking/launch-benchmark.md new file mode 100644 index 0000000000..84a904b860 --- /dev/null +++ b/docs/Collective-benchmarking/launch-benchmark.md @@ -0,0 +1,116 @@ +Automatically generated README for this automation recipe: **launch-benchmark** + +Category: **Collective benchmarking** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=launch-benchmark,5dc7662804bc4cad) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/launch-benchmark)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *launch,benchmark* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "launch benchmark" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=launch,benchmark` + +`cm run script --tags=launch,benchmark ` + +*or* + +`cmr "launch benchmark"` + +`cmr "launch benchmark " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'launch,benchmark', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="launch,benchmark"``` + +#### Run this script via Docker (beta) + +`cm docker script "launch benchmark" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/launch-benchmark/_cm.yaml) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/launch-benchmark/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/launch-benchmark/_cm.yaml) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/launch-benchmark/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/launch-benchmark/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/launch-benchmark/_cm.yaml) + +___ +### Script output +`cmr "launch benchmark " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Compiler-automation/get-aocl.md b/docs/Compiler-automation/get-aocl.md new file mode 100644 index 0000000000..efb20e4155 --- /dev/null +++ b/docs/Compiler-automation/get-aocl.md @@ -0,0 +1,137 @@ +Automatically generated README for this automation recipe: **get-aocl** + +Category: **Compiler automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-aocl,a65d3088f57d413d) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aocl)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,lib,aocl,amd-optimized,amd* +* Output cached? *true* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get lib aocl amd-optimized amd" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,lib,aocl,amd-optimized,amd` + +`cm run script --tags=get,lib,aocl,amd-optimized,amd ` + +*or* + +`cmr "get lib aocl amd-optimized amd"` + +`cmr "get lib aocl amd-optimized amd " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,lib,aocl,amd-optimized,amd', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,lib,aocl,amd-optimized,amd"``` + +#### Run this script via Docker (beta) + +`cm docker script "get lib aocl amd-optimized amd" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `4.0` + +* `4.0` +* `master` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aocl/_cm.json)*** + * get,generic,sys-util,_libmpfr-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic-python-lib,_scons + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,git,_repo.https://github.com/amd/aocl-libm-ose + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aocl/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aocl/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aocl/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aocl/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aocl/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aocl/_cm.json) + +___ +### Script output +`cmr "get lib aocl amd-optimized amd " -j` +#### New environment keys (filter) + +* `+LD_LIBRARY_PATH` +* `+LIBRARY_PATH` +* `CM_AOCL_BUILD_PATH` +* `CM_AOCL_LIB_PATH` +* `CM_AOCL_SRC_PATH` +#### New environment keys auto-detected from customize + +* `CM_AOCL_BUILD_PATH` +* `CM_AOCL_LIB_PATH` +* `CM_AOCL_SRC_PATH` \ No newline at end of file diff --git a/docs/Compiler-automation/get-cl.md b/docs/Compiler-automation/get-cl.md new file mode 100644 index 0000000000..6168f13456 --- /dev/null +++ b/docs/Compiler-automation/get-cl.md @@ -0,0 +1,138 @@ +Automatically generated README for this automation recipe: **get-cl** + +Category: **Compiler automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-cl,7dbb770faff947c0) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cl)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,cl,compiler,c-compiler,cpp-compiler,get-cl* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get cl compiler c-compiler cpp-compiler get-cl" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,cl,compiler,c-compiler,cpp-compiler,get-cl` + +`cm run script --tags=get,cl,compiler,c-compiler,cpp-compiler,get-cl ` + +*or* + +`cmr "get cl compiler c-compiler cpp-compiler get-cl"` + +`cmr "get cl compiler c-compiler cpp-compiler get-cl " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,cl,compiler,c-compiler,cpp-compiler,get-cl', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,cl,compiler,c-compiler,cpp-compiler,get-cl"``` + +#### Run this script via Docker (beta) + +`cm docker script "get cl compiler c-compiler cpp-compiler get-cl" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cl/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cl/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cl/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cl/run.bat) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cl/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cl/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cl/_cm.json) + +___ +### Script output +`cmr "get cl compiler c-compiler cpp-compiler get-cl " -j` +#### New environment keys (filter) + +* `+PATH` +* `CM_CL_*` +* `CM_COMPILER_*` +* `CM_CXX_COMPILER_*` +* `CM_C_COMPILER_*` +* `CM_LINKER_*` +#### New environment keys auto-detected from customize + +* `CM_CL_BIN` +* `CM_CL_BIN_WITH_PATH` +* `CM_CL_CACHE_TAGS` +* `CM_COMPILER_CACHE_TAGS` +* `CM_COMPILER_FAMILY` +* `CM_COMPILER_VERSION` +* `CM_CXX_COMPILER_BIN` +* `CM_CXX_COMPILER_FLAG_OUTPUT` +* `CM_CXX_COMPILER_FLAG_VERSION` +* `CM_CXX_COMPILER_WITH_PATH` +* `CM_C_COMPILER_BIN` +* `CM_C_COMPILER_FLAG_OUTPUT` +* `CM_C_COMPILER_FLAG_VERSION` +* `CM_C_COMPILER_WITH_PATH` \ No newline at end of file diff --git a/docs/Compiler-automation/get-compiler-flags.md b/docs/Compiler-automation/get-compiler-flags.md new file mode 100644 index 0000000000..b953d63887 --- /dev/null +++ b/docs/Compiler-automation/get-compiler-flags.md @@ -0,0 +1,130 @@ +Automatically generated README for this automation recipe: **get-compiler-flags** + +Category: **Compiler automation** + 
+License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-compiler-flags,31be8b74a69742f8) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-flags)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,compiler-flags* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get compiler-flags" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,compiler-flags` + +`cm run script --tags=get,compiler-flags ` + +*or* + +`cmr "get compiler-flags"` + +`cmr "get compiler-flags " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,compiler-flags', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,compiler-flags"``` + +#### Run this script via Docker (beta) + +`cm docker script "get compiler-flags" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-flags/_cm.json)*** + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,compiler + * Skip this dependency only if all ENV vars are set:
+`{'CM_C_COMPILER_BIN': ['on']}` + * CM names: `--adr.['compiler']...` + - CM script: [get-cl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cl) + - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) + - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-flags/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-flags/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-flags/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-flags/_cm.json) + +___ +### Script output +`cmr "get compiler-flags " -j` +#### New environment keys (filter) + +* `+ CFLAGS` +* `+ CXXFLAGS` +* `+ FFLAGS` +* `+ LDFLAGS` +* `+CM_HOST_OS_DEFAULT_INCLUDE_PATH` +#### New environment keys auto-detected from customize diff --git a/docs/Compiler-automation/get-compiler-rust.md b/docs/Compiler-automation/get-compiler-rust.md new file mode 100644 index 0000000000..f0692f37ab --- /dev/null +++ b/docs/Compiler-automation/get-compiler-rust.md @@ -0,0 +1,120 @@ +Automatically generated README for this automation recipe: **get-compiler-rust** + +Category: **Compiler automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-compiler-rust,97ffbd9e537b4b59) ]* + +--- +#### Summary + +* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-rust)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *get,rust-compiler* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get rust-compiler" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,rust-compiler` + +`cm run script --tags=get,rust-compiler ` + +*or* + +`cmr "get rust-compiler"` + +`cmr "get rust-compiler " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,rust-compiler', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,rust-compiler"``` + +#### Run this script via Docker (beta) + +`cm docker script "get rust-compiler" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-rust/_cm.yaml)*** + * get,python3 + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-rust/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-rust/_cm.yaml) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-rust/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-rust/_cm.yaml) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-rust/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-rust/_cm.yaml) + +___ +### Script output +`cmr "get rust-compiler " -j` +#### New environment keys (filter) + +* `+PATH` +#### New environment keys auto-detected from customize diff --git a/docs/Compiler-automation/get-gcc.md b/docs/Compiler-automation/get-gcc.md new file mode 100644 index 0000000000..66bb2f11fd --- /dev/null +++ b/docs/Compiler-automation/get-gcc.md @@ -0,0 +1,154 @@ +Automatically generated README for this automation recipe: **get-gcc** + +Category: **Compiler automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-gcc,dbf4ab5cbed74372) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-gcc)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,gcc,compiler,c-compiler,cpp-compiler,get-gcc* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get gcc compiler c-compiler cpp-compiler get-gcc" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,gcc,compiler,c-compiler,cpp-compiler,get-gcc` + +`cm run script --tags=get,gcc,compiler,c-compiler,cpp-compiler,get-gcc ` + +*or* + +`cmr "get gcc compiler c-compiler cpp-compiler get-gcc"` + +`cmr "get gcc compiler c-compiler cpp-compiler get-gcc " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,gcc,compiler,c-compiler,cpp-compiler,get-gcc', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,gcc,compiler,c-compiler,cpp-compiler,get-gcc"``` + +#### Run this script via Docker (beta) + +`cm docker script "get gcc compiler c-compiler cpp-compiler get-gcc" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-gcc/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-gcc/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-gcc/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-gcc/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-gcc/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-gcc/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-gcc/customize.py)*** + 1. 
***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-gcc/_cm.json)*** + * get,compiler-flags + - CM script: [get-compiler-flags](https://github.com/mlcommons/cm4mlops/tree/master/script/get-compiler-flags) + +___ +### Script output +`cmr "get gcc compiler c-compiler cpp-compiler get-gcc " -j` +#### New environment keys (filter) + +* `+ CFLAGS` +* `+ CXXFLAGS` +* `+ FFLAGS` +* `+ LDFLAGS` +* `+CM_HOST_OS_DEFAULT_INCLUDE_PATH` +* `+PATH` +* `CM_COMPILER_*` +* `CM_CXX_COMPILER_*` +* `CM_C_COMPILER_*` +* `CM_GCC_*` +* `CM_LINKER_*` +#### New environment keys auto-detected from customize + +* `CM_COMPILER_CACHE_TAGS` +* `CM_COMPILER_FAMILY` +* `CM_COMPILER_FLAGS_DEBUG` +* `CM_COMPILER_FLAGS_DEFAULT` +* `CM_COMPILER_FLAGS_FAST` +* `CM_COMPILER_VERSION` +* `CM_CXX_COMPILER_BIN` +* `CM_CXX_COMPILER_FLAG_OUTPUT` +* `CM_CXX_COMPILER_FLAG_VERSION` +* `CM_CXX_COMPILER_WITH_PATH` +* `CM_C_COMPILER_BIN` +* `CM_C_COMPILER_FLAG_OUTPUT` +* `CM_C_COMPILER_FLAG_VERSION` +* `CM_C_COMPILER_WITH_PATH` +* `CM_GCC_BIN` +* `CM_GCC_CACHE_TAGS` +* `CM_GCC_INSTALLED_PATH` +* `CM_LINKER_FLAGS_DEBUG` +* `CM_LINKER_FLAGS_DEFAULT` +* `CM_LINKER_FLAGS_FAST` \ No newline at end of file diff --git a/docs/Compiler-automation/get-go.md b/docs/Compiler-automation/get-go.md new file mode 100644 index 0000000000..963eca0576 --- /dev/null +++ b/docs/Compiler-automation/get-go.md @@ -0,0 +1,126 @@ +Automatically generated README for this automation recipe: **get-go** + +Category: **Compiler automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-go,ab42647a96724a25) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-go)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,tool,go,get-go* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get tool go get-go" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,tool,go,get-go` + +`cm run script --tags=get,tool,go,get-go ` + +*or* + +`cmr "get tool go get-go"` + +`cmr "get tool go get-go " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,tool,go,get-go', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,tool,go,get-go"``` + +#### Run this script via Docker (beta) + +`cm docker script "get tool go get-go" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-go/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-go/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-go/_cm.json)*** + * install,go + * Enable this dependency only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + - *Warning: no scripts found* + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-go/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-go/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-go/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-go/_cm.json) + +___ +### Script output +`cmr "get tool go get-go " -j` +#### New environment keys (filter) + +* `+PATH` +* `CM_GO_*` +#### New environment keys auto-detected from customize + +* `CM_GO_CACHE_TAGS` +* `CM_GO_INSTALLED_PATH` \ No newline at end of file diff --git a/docs/Compiler-automation/get-llvm.md b/docs/Compiler-automation/get-llvm.md new file mode 100644 index 0000000000..8615f38311 --- /dev/null +++ b/docs/Compiler-automation/get-llvm.md @@ -0,0 +1,175 @@ +Automatically generated README for this automation recipe: **get-llvm** + +Category: **Compiler automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-llvm,99832a103ed04eb8) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-llvm)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,llvm,compiler,c-compiler,cpp-compiler,get-llvm* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get llvm compiler c-compiler cpp-compiler get-llvm" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,llvm,compiler,c-compiler,cpp-compiler,get-llvm` + +`cm run script --tags=get,llvm,compiler,c-compiler,cpp-compiler,get-llvm[,variations] ` + +*or* + +`cmr "get llvm compiler c-compiler cpp-compiler get-llvm"` + +`cmr "get llvm compiler c-compiler cpp-compiler get-llvm [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'get,llvm,compiler,c-compiler,cpp-compiler,get-llvm' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,llvm,compiler,c-compiler,cpp-compiler,get-llvm"``` + +#### Run this script via Docker (beta) + +`cm docker script "get llvm compiler c-compiler cpp-compiler get-llvm[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_from-prebuilt` + - Workflow: + * `_from-src` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-llvm/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-llvm/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-llvm/_cm.json)*** + * install,llvm + * Enable this dependency only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + * CM names: `--adr.llvm-install...` + - CM script: [install-llvm-prebuilt](https://github.com/mlcommons/cm4mlops/tree/master/script/install-llvm-prebuilt) + - CM script: [install-llvm-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-llvm-src) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-llvm/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-llvm/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-llvm/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-llvm/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-llvm/_cm.json)*** + * get,compiler-flags + - CM script: [get-compiler-flags](https://github.com/mlcommons/cm4mlops/tree/master/script/get-compiler-flags) + +___ +### Script output +`cmr "get llvm compiler c-compiler cpp-compiler get-llvm [,variations]" -j` +#### New environment keys (filter) + +* `+ CFLAGS` +* `+ CXXFLAGS` +* `+ FFLAGS` +* `+ LDFLAGS` +* `+CM_HOST_OS_DEFAULT_INCLUDE_PATH` +* `+PATH` +* `CM_COMPILER_*` +* `CM_CXX_COMPILER_*` +* `CM_C_COMPILER_*` +* `CM_LINKER_*` +* `CM_LLVM_*` +#### New environment keys auto-detected from customize + +* `CM_COMPILER_CACHE_TAGS` +* `CM_COMPILER_FAMILY` +* `CM_COMPILER_FLAGS_DEBUG` +* `CM_COMPILER_FLAGS_DEFAULT` +* `CM_COMPILER_FLAGS_FAST` +* `CM_COMPILER_VERSION` +* `CM_CXX_COMPILER_BIN` +* `CM_CXX_COMPILER_FLAG_INCLUDE` +* `CM_CXX_COMPILER_FLAG_OUTPUT` +* `CM_CXX_COMPILER_FLAG_VERSION` +* `CM_CXX_COMPILER_WITH_PATH` +* `CM_C_COMPILER_BIN` +* `CM_C_COMPILER_FLAG_INCLUDE` +* `CM_C_COMPILER_FLAG_OUTPUT` +* `CM_C_COMPILER_FLAG_VERSION` +* `CM_C_COMPILER_WITH_PATH` +* `CM_LINKER_FLAGS_DEBUG` +* `CM_LINKER_FLAGS_DEFAULT` +* `CM_LINKER_FLAGS_FAST` +* 
`CM_LLVM_CLANG_BIN` +* `CM_LLVM_CLANG_CACHE_TAGS` \ No newline at end of file diff --git a/docs/Compiler-automation/install-gcc-src.md b/docs/Compiler-automation/install-gcc-src.md new file mode 100644 index 0000000000..a4fa68a1c5 --- /dev/null +++ b/docs/Compiler-automation/install-gcc-src.md @@ -0,0 +1,127 @@ +Automatically generated README for this automation recipe: **install-gcc-src** + +Category: **Compiler automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-gcc-src,faae0ebd6e1242db) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gcc-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,src,gcc,src-gcc* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install src gcc src-gcc" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,src,gcc,src-gcc` + +`cm run script --tags=install,src,gcc,src-gcc ` + +*or* + +`cmr "install src gcc src-gcc"` + +`cmr "install src gcc src-gcc " ` + + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'install,src,gcc,src-gcc',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,src,gcc,src-gcc"``` + +#### Run this script via Docker (beta) + +`cm docker script "install src gcc src-gcc" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+
+#### Versions
+Default version: `12`
+
+* `master`
+___
+### Dependencies on other CM scripts
+
+
+  1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gcc-src/_cm.json)***
+     * detect,os
+       - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os)
+  1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gcc-src/customize.py)***
+  1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gcc-src/_cm.json)
+  1. ***Run native script if exists***
+     * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gcc-src/run.sh)
+  1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gcc-src/_cm.json)
+  1. Run "postprocess" function from customize.py
+  1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gcc-src/_cm.json)***
+     * get,gcc
+       * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_REQUIRE_INSTALL': ['yes']}` + - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) + +___ +### Script output +`cmr "install src gcc src-gcc " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Compiler-automation/install-ipex-from-src.md b/docs/Compiler-automation/install-ipex-from-src.md new file mode 100644 index 0000000000..9762178c4f --- /dev/null +++ b/docs/Compiler-automation/install-ipex-from-src.md @@ -0,0 +1,198 @@ +Automatically generated README for this automation recipe: **install-ipex-from-src** + +Category: **Compiler automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-ipex-from-src,09364fff2bf04516) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-ipex-from-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,get,src,from.src,ipex,src-ipex* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install get src from.src ipex src-ipex" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,get,src,from.src,ipex,src-ipex` + +`cm run script --tags=install,get,src,from.src,ipex,src-ipex[,variations] ` + +*or* + +`cmr "install get src from.src ipex src-ipex"` + +`cmr "install get src from.src ipex src-ipex [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'install,get,src,from.src,ipex,src-ipex' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,get,src,from.src,ipex,src-ipex"``` + +#### Run this script via Docker (beta) + +`cm docker script "install get src from.src ipex src-ipex[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_branch.#` + - Environment variables: + - *CM_GIT_CHECKOUT*: `#` + - Workflow: + * `_for-intel-mlperf-inference-v3.1-gptj` + - Environment variables: + - *CM_CONDA_ENV*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,conda,_name.gptj-pt + * CM names: `--adr.['conda']...` + - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) + * get,generic,conda-package,_package.python + * CM names: `--adr.['conda-package', 'python3']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.wheel,_source.conda-forge + * CM names: `--adr.['conda-package', 'wheel']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.setuptools,_source.conda-forge + * CM names: `--adr.['conda-package', 'setuptools']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * install,llvm,src,_for-intel-mlperf-inference-v3.1-gptj + - CM script: [install-llvm-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-llvm-src) + * `_sha.#` + - Environment variables: + - *CM_GIT_CHECKOUT_SHA*: `#` + - Workflow: + * `_tag.#` + - Environment variables: + - *CM_GIT_CHECKOUT_TAG*: `#` + - Workflow: + +
+ + + * Group "**repo**" +
+ Click here to expand this section. + + * `_repo.#` + - Environment variables: + - *CM_GIT_URL*: `#` + - Workflow: + * **`_repo.https://github.com/intel/intel-extension-for-pytorch`** (default) + - Environment variables: + - *CM_GIT_URL*: `https://github.com/intel/intel-extension-for-pytorch` + - Workflow: + +
+ + +#### Default variations + +`_repo.https://github.com/intel/intel-extension-for-pytorch` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+
+___
+### Dependencies on other CM scripts
+
+
+  1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-ipex-from-src/_cm.json)***
+     * detect,os
+       - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os)
+     * detect,cpu
+       - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu)
+     * get,python3
+       * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_CONDA_ENV': ['yes']}`
+       * CM names: `--adr.['python', 'python3']...`
+       - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3)
+     * get,pytorch,from.src
+       * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_CONDA_ENV': ['yes']}` + * CM names: `--adr.['pytorch']...` + - CM script: [install-pytorch-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pytorch-from-src) + * get,git,repo + * CM names: `--adr.['ipex-src-repo']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-ipex-from-src/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-ipex-from-src/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-ipex-from-src/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-ipex-from-src/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-ipex-from-src/_cm.json) + +___ +### Script output +`cmr "install get src from.src ipex src-ipex [,variations]" -j` +#### New environment keys (filter) + +* `CM_IPEX_*` +#### New environment keys auto-detected from customize diff --git a/docs/Compiler-automation/install-llvm-prebuilt.md b/docs/Compiler-automation/install-llvm-prebuilt.md new file mode 100644 index 0000000000..6d338b092a --- /dev/null +++ b/docs/Compiler-automation/install-llvm-prebuilt.md @@ -0,0 +1,137 @@ +Automatically generated README for this automation recipe: **install-llvm-prebuilt** + +Category: **Compiler automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=install-llvm-prebuilt,cda9094971724a0a) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-prebuilt)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,prebuilt,llvm,prebuilt-llvm,install-prebuilt-llvm* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install prebuilt llvm prebuilt-llvm install-prebuilt-llvm" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,prebuilt,llvm,prebuilt-llvm,install-prebuilt-llvm` + +`cm run script --tags=install,prebuilt,llvm,prebuilt-llvm,install-prebuilt-llvm ` + +*or* + +`cmr "install prebuilt llvm prebuilt-llvm install-prebuilt-llvm"` + +`cmr "install prebuilt llvm prebuilt-llvm install-prebuilt-llvm " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'install,prebuilt,llvm,prebuilt-llvm,install-prebuilt-llvm' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,prebuilt,llvm,prebuilt-llvm,install-prebuilt-llvm"``` + +#### Run this script via Docker (beta) + +`cm docker script "install prebuilt llvm prebuilt-llvm install-prebuilt-llvm" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `15.0.6` + +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-prebuilt/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-prebuilt/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-prebuilt/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-prebuilt/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-prebuilt/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-prebuilt/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-prebuilt/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-prebuilt/_cm.json)*** + * get,llvm + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) + +___ +### Script output +`cmr "install prebuilt llvm prebuilt-llvm install-prebuilt-llvm " -j` +#### New environment keys (filter) + +* `+CPLUS_INCLUDE_PATH` +* `+C_INCLUDE_PATH` +* `+LD_LIBRARY_PATH` +* `+PATH` +* `CM_COMPILER_NAME` +* `CM_LLVM_*` +#### New environment keys auto-detected from customize + +* `CM_LLVM_CLANG_BIN_WITH_PATH` +* `CM_LLVM_INSTALLED_PATH` +* `CM_LLVM_PACKAGE` \ No newline at end of file diff --git a/docs/Compiler-automation/install-llvm-src.md b/docs/Compiler-automation/install-llvm-src.md new file mode 100644 index 0000000000..331fbea923 --- /dev/null +++ b/docs/Compiler-automation/install-llvm-src.md @@ -0,0 +1,292 @@ +Automatically generated README for this automation recipe: **install-llvm-src** + +Category: **Compiler automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-llvm-src,2af16e9a6c5f4702) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,src,llvm,from.src,src-llvm* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install src llvm from.src src-llvm" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,src,llvm,from.src,src-llvm` + +`cm run script --tags=install,src,llvm,from.src,src-llvm[,variations] ` + +*or* + +`cmr "install src llvm from.src src-llvm"` + +`cmr "install src llvm from.src src-llvm [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'install,src,llvm,from.src,src-llvm' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,src,llvm,from.src,src-llvm"``` + +#### Run this script via Docker (beta) + +`cm docker script "install src llvm from.src src-llvm[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_branch.#` + - Environment variables: + - *CM_GIT_CHECKOUT*: `#` + - Workflow: + * `_for-intel-mlperf-inference-v3.1-bert` + - Environment variables: + - *CM_LLVM_CONDA_ENV*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,gcc + - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) + * get,conda,_name.bert-pt + * CM names: `--adr.['conda']...` + - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) + * get,conda-package,_package.ncurses,_source.conda-forge + * CM names: `--adr.['conda-package', 'ncurses']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.ninja + * CM names: `--adr.['conda-package', 'ninja']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.cmake + * CM names: `--adr.['conda-package', 'cmake']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,conda-package,_package.llvm-openmp,_source.conda-forge + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,conda-package,_package.chardet + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.libstdcxx-ng,_source.conda-forge + * CM names: `--adr.['conda-package', 'libstdcxx-ng']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * `_for-intel-mlperf-inference-v3.1-gptj` + - Environment variables: + - *CM_LLVM_CONDA_ENV*: `yes` + - 
*CM_LLVM_16_INTEL_MLPERF_INFERENCE*: `yes` + - *USE_CUDA*: `0` + - *CUDA_VISIBLE_DEVICES*: `` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-sys-util,_g++-12 + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,gcc + - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) + * get,conda,_name.gptj-pt + * CM names: `--adr.['conda']...` + - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) + * get,generic,conda-package,_package.python + * CM names: `--adr.['conda-package', 'python']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,conda-package,_package.ncurses,_source.conda-forge + * CM names: `--adr.['conda-package', 'ncurses']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,conda-package,_package.chardet + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.libstdcxx-ng,_source.conda-forge + * CM names: `--adr.['conda-package', 'libstdcxx-ng']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.mkl,_source.intel + * CM names: `--adr.['conda-package', 'mkl']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.mkl-include,_source.intel + * CM names: `--adr.['conda-package', 'mkl-include']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * 
get,generic,conda-package,_package.intel-openmp,_source.intel + * CM names: `--adr.['conda-package', 'intel-openmp']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.gperftools,_source.conda-forge + * CM names: `--adr.['conda-package', 'gperftools']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.pybind11,_source.conda-forge + * CM names: `--adr.['conda-package', 'pybind11']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic-python-lib,_custom-python,_package.torch,_url.git+https://github.com/pytorch/pytorch.git@927dc662386af052018212c7d01309a506fc94cd + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_custom-python,_package.setuptools + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_custom-python,_package.neural-compressor,_url.git+https://github.com/intel/neural-compressor.git@a2931eaa4052eec195be3c79a13f7bfa23e54473 + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_full-history` + - Workflow: + * `_runtimes.#` + - Environment variables: + - *CM_LLVM_ENABLE_RUNTIMES*: `#` + - Workflow: + * `_sha.#` + - Environment variables: + - *CM_GIT_CHECKOUT_SHA*: `#` + - Workflow: + * `_tag.#` + - Environment variables: + - *CM_GIT_CHECKOUT_TAG*: `#` + - Workflow: + +
+ + + * Group "**build-type**" +
+ Click here to expand this section. + + * `_debug` + - Environment variables: + - *CM_LLVM_BUILD_TYPE*: `debug` + - Workflow: + * **`_release`** (default) + - Environment variables: + - *CM_LLVM_BUILD_TYPE*: `release` + - Workflow: + +
+ + + * Group "**clang**" +
+ Click here to expand this section. + + * **`_clang`** (default) + - Environment variables: + - *CM_LLVM_ENABLE_PROJECTS*: `clang` + - Workflow: + +
+ + + * Group "**repo**" +
+ Click here to expand this section. + + * `_repo.#` + - Environment variables: + - *CM_GIT_URL*: `#` + - Workflow: + +
+ + +#### Default variations + +`_clang,_release` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-src/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,cmake + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_LLVM_CONDA_ENV': ['yes']}`
+       - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake)
+     * get,generic-sys-util,_ninja-build
+       * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_LLVM_CONDA_ENV': ['yes']}`
+       - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util)
+     * get,git,repo
+       * CM names: `--adr.['llvm-src-repo']...`
+       - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo)
+  1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-src/customize.py)***
+  1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-src/_cm.json)
+  1. ***Run native script if exists***
+     * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-src/run.sh)
+  1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-src/_cm.json)
+  1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-src/customize.py)***
+  1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-src/_cm.json)***
+     * get,llvm
+       * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_REQUIRE_INSTALL': ['yes']}` + - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) + +___ +### Script output +`cmr "install src llvm from.src src-llvm [,variations]" -j` +#### New environment keys (filter) + +* `+C_INCLUDE_PATH` +* `+PATH` +* `CM_GET_DEPENDENT_CACHED_PATH` +* `CM_LLVM_*` +#### New environment keys auto-detected from customize + +* `CM_GET_DEPENDENT_CACHED_PATH` +* `CM_LLVM_CLANG_BIN_WITH_PATH` +* `CM_LLVM_CMAKE_CMD` +* `CM_LLVM_INSTALLED_PATH` \ No newline at end of file diff --git a/docs/Compiler-automation/install-onednn-from-src.md b/docs/Compiler-automation/install-onednn-from-src.md new file mode 100644 index 0000000000..3d9232a764 --- /dev/null +++ b/docs/Compiler-automation/install-onednn-from-src.md @@ -0,0 +1,181 @@ +Automatically generated README for this automation recipe: **install-onednn-from-src** + +Category: **Compiler automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-onednn-from-src,fe3a652e315f4c8f) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onednn-from-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,get,src,from.src,onednn,src-onednn* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install get src from.src onednn src-onednn" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,get,src,from.src,onednn,src-onednn` + +`cm run script --tags=install,get,src,from.src,onednn,src-onednn[,variations] ` + +*or* + +`cmr "install get src from.src onednn src-onednn"` + +`cmr "install get src from.src onednn src-onednn [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'install,get,src,from.src,onednn,src-onednn' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,get,src,from.src,onednn,src-onednn"``` + +#### Run this script via Docker (beta) + +`cm docker script "install get src from.src onednn src-onednn[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_branch.#` + - Environment variables: + - *CM_GIT_CHECKOUT*: `#` + - Workflow: + * `_for-intel-mlperf-inference-v3.1-bert` + - Environment variables: + - *CM_CONDA_ENV*: `yes` + - *CM_FOR_INTEL_MLPERF_INFERENCE*: `yes` + - Workflow: + * `_sha.#` + - Environment variables: + - *CM_GIT_CHECKOUT_SHA*: `#` + - Workflow: + * `_tag.#` + - Environment variables: + - *CM_GIT_CHECKOUT_TAG*: `#` + - Workflow: + +
+ + + * Group "**repo**" +
+ Click here to expand this section. + + * `_repo.#` + - Environment variables: + - *CM_GIT_URL*: `#` + - Workflow: + * **`_repo.https://github.com/oneapi-src/oneDNN`** (default) + - Environment variables: + - *CM_GIT_URL*: `https://github.com/oneapi-src/oneDNN` + - Workflow: + +
+ + +#### Default variations + +`_repo.https://github.com/oneapi-src/oneDNN` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+
+___
+### Dependencies on other CM scripts
+
+
+  1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onednn-from-src/_cm.json)***
+     * detect,os
+       - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os)
+     * detect,cpu
+       - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu)
+     * get,python3
+       * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_CONDA_ENV': ['yes']}` + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,git,repo + * CM names: `--adr.['onednn-src-repo']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onednn-from-src/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onednn-from-src/_cm.json) + 1. ***Run native script if exists*** + * [run-intel-mlperf-inference.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onednn-from-src/run-intel-mlperf-inference.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onednn-from-src/_cm.json) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onednn-from-src/_cm.json) + +___ +### Script output +`cmr "install get src from.src onednn src-onednn [,variations]" -j` +#### New environment keys (filter) + +* `CM_ONEDNN_*` +#### New environment keys auto-detected from customize + +* `CM_ONEDNN_INSTALLED_PATH` \ No newline at end of file diff --git a/docs/Compiler-automation/install-onnxruntime-from-src.md b/docs/Compiler-automation/install-onnxruntime-from-src.md new file mode 100644 index 0000000000..e99fa55e7a --- /dev/null +++ b/docs/Compiler-automation/install-onnxruntime-from-src.md @@ -0,0 +1,184 @@ +Automatically generated README for this automation recipe: **install-onnxruntime-from-src** + +Category: **Compiler automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-onnxruntime-from-src,9798c7e7a5944cee) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onnxruntime-from-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,get,src,from.src,onnxruntime,src-onnxruntime* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install get src from.src onnxruntime src-onnxruntime" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,get,src,from.src,onnxruntime,src-onnxruntime` + +`cm run script --tags=install,get,src,from.src,onnxruntime,src-onnxruntime[,variations] ` + +*or* + +`cmr "install get src from.src onnxruntime src-onnxruntime"` + +`cmr "install get src from.src onnxruntime src-onnxruntime [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'install,get,src,from.src,onnxruntime,src-onnxruntime' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,get,src,from.src,onnxruntime,src-onnxruntime"``` + +#### Run this script via Docker (beta) + +`cm docker script "install get src from.src onnxruntime src-onnxruntime[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_branch.#` + - Environment variables: + - *CM_GIT_CHECKOUT*: `#` + - Workflow: + * `_cuda` + - Environment variables: + - *CM_ONNXRUNTIME_GPU*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,cuda,_cudnn + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * `_sha.#` + - Environment variables: + - *CM_GIT_CHECKOUT_SHA*: `#` + - Workflow: + * `_tag.#` + - Environment variables: + - *CM_GIT_CHECKOUT_TAG*: `#` + - Workflow: + +
+ + + * Group "**repo**" +
+ Click here to expand this section. + + * **`_repo.https://github.com/Microsoft/onnxruntime`** (default) + - Environment variables: + - *CM_GIT_URL*: `https://github.com/Microsoft/onnxruntime` + - Workflow: + +
+ + +#### Default variations + +`_repo.https://github.com/Microsoft/onnxruntime` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+
+___
+### Dependencies on other CM scripts
+
+
+  1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onnxruntime-from-src/_cm.json)***
+     * detect,os
+       - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os)
+     * detect,cpu
+       - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu)
+     * fail,filter,_windows
+       - CM script: [fail](https://github.com/mlcommons/cm4mlops/tree/master/script/fail)
+     * get,python3
+       * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_CONDA_ENV': ['yes']}` + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,cmake + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + * get,gcc + - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) + * get,git,repo + * CM names: `--adr.['onnxruntime-src-repo']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onnxruntime-from-src/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onnxruntime-from-src/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onnxruntime-from-src/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onnxruntime-from-src/_cm.json) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onnxruntime-from-src/_cm.json) + +___ +### Script output +`cmr "install get src from.src onnxruntime src-onnxruntime [,variations]" -j` +#### New environment keys (filter) + +* `CM_ONNXRUNTIME_*` +#### New environment keys auto-detected from customize diff --git a/docs/Compiler-automation/install-pytorch-from-src.md b/docs/Compiler-automation/install-pytorch-from-src.md new file mode 100644 index 0000000000..c63c6219f9 --- /dev/null +++ b/docs/Compiler-automation/install-pytorch-from-src.md @@ -0,0 +1,248 @@ +Automatically generated README for this automation recipe: **install-pytorch-from-src** + +Category: **Compiler automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-pytorch-from-src,64eaf3e81de94f41) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-from-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,get,src,from.src,pytorch,src-pytorch* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install get src from.src pytorch src-pytorch" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,get,src,from.src,pytorch,src-pytorch` + +`cm run script --tags=install,get,src,from.src,pytorch,src-pytorch[,variations] ` + +*or* + +`cmr "install get src from.src pytorch src-pytorch"` + +`cmr "install get src from.src pytorch src-pytorch [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'install,get,src,from.src,pytorch,src-pytorch' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,get,src,from.src,pytorch,src-pytorch"``` + +#### Run this script via Docker (beta) + +`cm docker script "install get src from.src pytorch src-pytorch[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_branch.#` + - Environment variables: + - *CM_GIT_CHECKOUT*: `#` + - Workflow: + * `_cuda` + - Environment variables: + - *CUDA_HOME*: `<<>>` + - *CUDNN_LIBRARY_PATH*: `<<>>` + - *CUDNN_INCLUDE_PATH*: `<<>>` + - *CUDA_NVCC_EXECUTABLE*: `<<>>` + - *USE_CUDA*: `1` + - *USE_CUDNN*: `1` + - *TORCH_CUDA_ARCH_LIST*: `Ampere Ada Hopper` + - *TORCH_CXX_FLAGS*: `-D_GLIBCXX_USE_CXX11_ABI=1` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,cuda,_cudnn + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * `_for-intel-mlperf-inference-v3.1-bert` + - Environment variables: + - *CM_CONDA_ENV*: `yes` + - *CM_MLPERF_INFERENCE_INTEL*: `yes` + - *USE_CUDA*: `0` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-sys-util,_libffi7 + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,conda,_name.bert-pt + * CM names: `--adr.['conda']...` + - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) + * get,generic,conda-package,_package.ncurses,_source.conda-forge + * CM names: `--adr.['conda-package', 'ncurses']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.python + * CM names: `--adr.['conda-package', 'python3']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * install,llvm,src,_tag.llvmorg-15.0.7,_runtimes.libcxx:libcxxabi:openmp,_clang,_release,_for-intel-mlperf-inference-v3.1-bert + - CM script: [install-llvm-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-llvm-src) + * get,generic,conda-package,_package.ninja + * CM names: `--adr.['conda-package', 'ninja']...` + - CM script: 
[install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.cmake + * CM names: `--adr.['conda-package', 'cmake']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.mkl,_source.intel + * CM names: `--adr.['conda-package', 'mkl']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.mkl-include,_source.intel + * CM names: `--adr.['conda-package', 'mkl-include']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.intel-openmp,_source.intel + * CM names: `--adr.['conda-package', 'intel-openmp']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.llvm-openmp,_source.conda-forge + * CM names: `--adr.['conda-package', 'llvm-openmp']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.jemalloc,_source.conda-forge + * CM names: `--adr.['conda-package', 'jemalloc']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.wheel,_source.conda-forge + * CM names: `--adr.['conda-package', 'wheel']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.setuptools,_source.conda-forge + * CM names: `--adr.['conda-package', 
'setuptools']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.future,_source.conda-forge + * CM names: `--adr.['conda-package', 'future']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.libstdcxx-ng,_source.conda-forge + * CM names: `--adr.['conda-package', 'libstdcxx-ng']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * `_for-nvidia-mlperf-inference-v3.1` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,cmake + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + * `_sha.#` + - Environment variables: + - *CM_GIT_CHECKOUT_SHA*: `#` + - Workflow: + * `_tag.#` + - Environment variables: + - *CM_GIT_CHECKOUT_TAG*: `#` + - Workflow: + +
+ + + * Group "**repo**" +
+ Click here to expand this section. + + * `_repo.#` + - Environment variables: + - *CM_GIT_URL*: `#` + - Workflow: + * **`_repo.https://github.com/pytorch/pytorch`** (default) + - Environment variables: + - *CM_GIT_URL*: `https://github.com/pytorch/pytorch` + - Workflow: + +
+ + +#### Default variations + +`_repo.https://github.com/pytorch/pytorch` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+
+___
+### Dependencies on other CM scripts
+
+
+  1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-from-src/_cm.json)***
+     * detect,os
+       - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os)
+     * detect,cpu
+       - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu)
+     * get,python3
+       * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_CONDA_ENV': ['yes']}` + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,git,repo + * CM names: `--adr.['pytorch-src-repo']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-from-src/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-from-src/_cm.json) + 1. ***Run native script if exists*** + * [run-intel-mlperf-inference-v3_1.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-from-src/run-intel-mlperf-inference-v3_1.sh) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-from-src/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-from-src/_cm.json) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-from-src/_cm.json) + +___ +### Script output +`cmr "install get src from.src pytorch src-pytorch [,variations]" -j` +#### New environment keys (filter) + +* `CM_PYTORCH_*` +#### New environment keys auto-detected from customize diff --git a/docs/Compiler-automation/install-pytorch-kineto-from-src.md b/docs/Compiler-automation/install-pytorch-kineto-from-src.md new file mode 100644 index 0000000000..5b38ea6ed2 --- /dev/null +++ b/docs/Compiler-automation/install-pytorch-kineto-from-src.md @@ -0,0 +1,191 @@ +Automatically generated README for this automation recipe: **install-pytorch-kineto-from-src** + +Category: **Compiler automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-pytorch-kineto-from-src,98a4b061712d4483) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-kineto-from-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,get,src,from.src,pytorch-kineto,kineto,src-pytorch-kineto* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install get src from.src pytorch-kineto kineto src-pytorch-kineto" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,get,src,from.src,pytorch-kineto,kineto,src-pytorch-kineto` + +`cm run script --tags=install,get,src,from.src,pytorch-kineto,kineto,src-pytorch-kineto[,variations] ` + +*or* + +`cmr "install get src from.src pytorch-kineto kineto src-pytorch-kineto"` + +`cmr "install get src from.src pytorch-kineto kineto src-pytorch-kineto [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'install,get,src,from.src,pytorch-kineto,kineto,src-pytorch-kineto' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,get,src,from.src,pytorch-kineto,kineto,src-pytorch-kineto"``` + +#### Run this script via Docker (beta) + +`cm docker script "install get src from.src pytorch-kineto kineto src-pytorch-kineto[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_branch.#` + - Environment variables: + - *CM_GIT_CHECKOUT*: `#` + - Workflow: + * `_cuda` + - Environment variables: + - *CUDA_HOME*: `<<>>` + - *CUDA_NVCC_EXECUTABLE*: `<<>>` + - *CUDNN_INCLUDE_PATH*: `<<>>` + - *CUDNN_LIBRARY_PATH*: `<<>>` + - *TORCH_CUDA_ARCH_LIST*: `Ampere Ada Hopper` + - *TORCH_CXX_FLAGS*: `-D_GLIBCXX_USE_CXX11_ABI=1` + - *USE_CUDA*: `1` + - *USE_CUDNN*: `1` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,cuda,_cudnn + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * `_sha.#` + - Environment variables: + - *CM_GIT_CHECKOUT_SHA*: `#` + - Workflow: + * `_tag.#` + - Environment variables: + - *CM_GIT_CHECKOUT_TAG*: `#` + - Workflow: + +
+ + + * Group "**repo**" +
+ Click here to expand this section. + + * `_repo.#` + - Environment variables: + - *CM_GIT_URL*: `#` + - Workflow: + * **`_repo.https://github.com/pytorch/kineto`** (default) + - Environment variables: + - *CM_GIT_URL*: `https://github.com/pytorch/kineto` + - Workflow: + +
+ + +#### Default variations + +`_repo.https://github.com/pytorch/kineto` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+
+___
+### Dependencies on other CM scripts
+
+
+  1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-kineto-from-src/_cm.json)***
+     * detect,os
+       - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os)
+     * detect,cpu
+       - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu)
+     * get,python3
+       * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_CONDA_ENV': ['yes']}` + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,cmake + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + * get,git,repo + * CM names: `--adr.['pytorch-kineto-src-repo']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-kineto-from-src/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-kineto-from-src/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-kineto-from-src/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-kineto-from-src/_cm.json) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-kineto-from-src/_cm.json) + +___ +### Script output +`cmr "install get src from.src pytorch-kineto kineto src-pytorch-kineto [,variations]" -j` +#### New environment keys (filter) + +* `CM_PYTORCH_KINETO_*` +#### New environment keys auto-detected from customize diff --git a/docs/Compiler-automation/install-torchvision-from-src.md b/docs/Compiler-automation/install-torchvision-from-src.md new file mode 100644 index 0000000000..c269b624b7 --- /dev/null +++ b/docs/Compiler-automation/install-torchvision-from-src.md @@ -0,0 +1,194 @@ +Automatically generated README for this automation recipe: **install-torchvision-from-src** + +Category: **Compiler automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-torchvision-from-src,68b855780d474546) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-torchvision-from-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,get,src,from.src,pytorchvision,torchvision,src-pytorchvision* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install get src from.src pytorchvision torchvision src-pytorchvision" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,get,src,from.src,pytorchvision,torchvision,src-pytorchvision` + +`cm run script --tags=install,get,src,from.src,pytorchvision,torchvision,src-pytorchvision[,variations] ` + +*or* + +`cmr "install get src from.src pytorchvision torchvision src-pytorchvision"` + +`cmr "install get src from.src pytorchvision torchvision src-pytorchvision [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+
Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'install,get,src,from.src,pytorchvision,torchvision,src-pytorchvision',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+   print (r['error'])
+
+```
+
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,get,src,from.src,pytorchvision,torchvision,src-pytorchvision"``` + +#### Run this script via Docker (beta) + +`cm docker script "install get src from.src pytorchvision torchvision src-pytorchvision[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_branch.#` + - Environment variables: + - *CM_GIT_CHECKOUT*: `#` + - Workflow: + * `_cuda` + - Environment variables: + - *CUDA_HOME*: `<<>>` + - *CUDA_NVCC_EXECUTABLE*: `<<>>` + - *CUDNN_INCLUDE_PATH*: `<<>>` + - *CUDNN_LIBRARY_PATH*: `<<>>` + - *USE_CUDA*: `1` + - *USE_CUDNN*: `1` + - *TORCH_CUDA_ARCH_LIST*: `Ampere Ada Hopper` + - *TORCH_CXX_FLAGS*: `-D_GLIBCXX_USE_CXX11_ABI=1` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,cuda,_cudnn + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * `_for-nvidia-mlperf-inference-v3.1` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * install,pytorch,from.src,_for-nvidia-mlperf-inference-v3.1 + - CM script: [install-pytorch-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pytorch-from-src) + * `_sha.#` + - Environment variables: + - *CM_GIT_CHECKOUT_SHA*: `#` + - Workflow: + * `_tag.#` + - Environment variables: + - *CM_GIT_CHECKOUT_TAG*: `#` + - Workflow: + +
+ + + * Group "**repo**" +
+ Click here to expand this section. + + * `_repo.#` + - Environment variables: + - *CM_GIT_URL*: `#` + - Workflow: + * **`_repo.https://github.com/pytorch/vision`** (default) + - Environment variables: + - *CM_GIT_URL*: `https://github.com/pytorch/vision` + - Workflow: + +
+ + +#### Default variations + +`_repo.https://github.com/pytorch/vision` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-torchvision-from-src/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,python3 + * Skip this dependenecy only if all ENV vars are set:
+
`{'CM_CONDA_ENV': ['yes']}`
+     * CM names: `--adr.['python', 'python3']...`
+       - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3)
+     * get,git,repo
+     * CM names: `--adr.['pytorchision-src-repo', 'torchision-src-repo']...`
+       - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo)
+  1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-torchvision-from-src/customize.py)***
+  1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-torchvision-from-src/_cm.json)
+  1. ***Run native script if exists***
+     * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-torchvision-from-src/run.sh)
+  1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-torchvision-from-src/_cm.json)
+  1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-torchvision-from-src/customize.py)***
+  1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-torchvision-from-src/_cm.json) + +___ +### Script output +`cmr "install get src from.src pytorchvision torchvision src-pytorchvision [,variations]" -j` +#### New environment keys (filter) + +* `CM_PYTORCHVISION_*` +#### New environment keys auto-detected from customize diff --git a/docs/Compiler-automation/install-tpp-pytorch-extension.md b/docs/Compiler-automation/install-tpp-pytorch-extension.md new file mode 100644 index 0000000000..56669e10fa --- /dev/null +++ b/docs/Compiler-automation/install-tpp-pytorch-extension.md @@ -0,0 +1,198 @@ +Automatically generated README for this automation recipe: **install-tpp-pytorch-extension** + +Category: **Compiler automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-tpp-pytorch-extension,1701d2f5f4e84d42) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tpp-pytorch-extension)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,get,src,from.src,tpp-pex,src-tpp-pex* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install get src from.src tpp-pex src-tpp-pex" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,get,src,from.src,tpp-pex,src-tpp-pex` + +`cm run script --tags=install,get,src,from.src,tpp-pex,src-tpp-pex[,variations] ` + +*or* + +`cmr "install get src from.src tpp-pex src-tpp-pex"` + +`cmr "install get src from.src tpp-pex src-tpp-pex [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+
Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'install,get,src,from.src,tpp-pex,src-tpp-pex',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+   print (r['error'])
+
+```
+
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,get,src,from.src,tpp-pex,src-tpp-pex"``` + +#### Run this script via Docker (beta) + +`cm docker script "install get src from.src tpp-pex src-tpp-pex[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_branch.#` + - Environment variables: + - *CM_GIT_CHECKOUT*: `#` + - Workflow: + * `_for-intel-mlperf-inference-v3.1-gptj` + - Environment variables: + - *CM_CONDA_ENV*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,conda,_name.gptj-pt + * CM names: `--adr.['conda']...` + - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) + * get,generic,conda-package,_package.python + * CM names: `--adr.['conda-package', 'python3']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.wheel,_source.conda-forge + * CM names: `--adr.['conda-package', 'wheel']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.setuptools,_source.conda-forge + * CM names: `--adr.['conda-package', 'setuptools']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * install,llvm,src,_for-intel-mlperf-inference-v3.1-gptj + - CM script: [install-llvm-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-llvm-src) + * `_sha.#` + - Environment variables: + - *CM_GIT_CHECKOUT_SHA*: `#` + - Workflow: + * `_tag.#` + - Environment variables: + - *CM_GIT_CHECKOUT_TAG*: `#` + - Workflow: + +
+ + + * Group "**repo**" +
+ Click here to expand this section. + + * `_repo.#` + - Environment variables: + - *CM_GIT_URL*: `#` + - Workflow: + * **`_repo.https://github.com/libxsmm/tpp-pytorch-extension`** (default) + - Environment variables: + - *CM_GIT_URL*: `https://github.com/libxsmm/tpp-pytorch-extension` + - Workflow: + +
+ + +#### Default variations + +`_repo.https://github.com/libxsmm/tpp-pytorch-extension` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tpp-pytorch-extension/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,python3 + * Skip this dependenecy only if all ENV vars are set:
+
`{'CM_CONDA_ENV': ['yes']}`
+     * CM names: `--adr.['python', 'python3']...`
+       - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3)
+     * get,pytorch,from.src
+       * Skip this dependency only if all ENV vars are set:
+
`{'CM_CONDA_ENV': ['yes']}`
+     * CM names: `--adr.['pytorch']...`
+       - CM script: [install-pytorch-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pytorch-from-src)
+     * get,git,repo
+     * CM names: `--adr.['tpp-pex-src-repo']...`
+       - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo)
+  1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tpp-pytorch-extension/customize.py)***
+  1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tpp-pytorch-extension/_cm.json)
+  1. ***Run native script if exists***
+     * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tpp-pytorch-extension/run.sh)
+  1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tpp-pytorch-extension/_cm.json)
+  1. Run "postprocess" function from customize.py
+  1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tpp-pytorch-extension/_cm.json) + +___ +### Script output +`cmr "install get src from.src tpp-pex src-tpp-pex [,variations]" -j` +#### New environment keys (filter) + +* `CM_TPP_PEX_*` +#### New environment keys auto-detected from customize diff --git a/docs/Compiler-automation/install-transformers-from-src.md b/docs/Compiler-automation/install-transformers-from-src.md new file mode 100644 index 0000000000..0ac334c3cf --- /dev/null +++ b/docs/Compiler-automation/install-transformers-from-src.md @@ -0,0 +1,196 @@ +Automatically generated README for this automation recipe: **install-transformers-from-src** + +Category: **Compiler automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-transformers-from-src,88512c48ea5c4186) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-transformers-from-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,src,from.src,transformers,src-transformers* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install src from.src transformers src-transformers" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,src,from.src,transformers,src-transformers` + +`cm run script --tags=install,src,from.src,transformers,src-transformers[,variations] ` + +*or* + +`cmr "install src from.src transformers src-transformers"` + +`cmr "install src from.src transformers src-transformers [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+
Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'install,src,from.src,transformers,src-transformers',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+   print (r['error'])
+
+```
+
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,src,from.src,transformers,src-transformers"``` + +#### Run this script via Docker (beta) + +`cm docker script "install src from.src transformers src-transformers[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_branch.#` + - Environment variables: + - *CM_GIT_CHECKOUT*: `#` + - Workflow: + * `_for-intel-mlperf-inference-v3.1-bert` + - Environment variables: + - *CM_CONDA_ENV*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,conda,_name.bert-pt + * CM names: `--adr.['conda']...` + - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) + * get,generic,conda-package,_package.python + * CM names: `--adr.['conda-package', 'python3']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.wheel,_source.conda-forge + * CM names: `--adr.['conda-package', 'wheel']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.setuptools,_source.conda-forge + * CM names: `--adr.['conda-package', 'setuptools']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * `_sha.#` + - Environment variables: + - *CM_GIT_CHECKOUT_SHA*: `#` + - Workflow: + * `_tag.#` + - Environment variables: + - *CM_GIT_CHECKOUT_TAG*: `#` + - Workflow: + +
+ + + * Group "**repo**" +
+ Click here to expand this section. + + * `_repo.#` + - Environment variables: + - *CM_GIT_URL*: `#` + - Workflow: + * **`_repo.https://github.com/pytorch/pytorch`** (default) + - Environment variables: + - *CM_GIT_URL*: `https://github.com/huggingface/transformers` + - Workflow: + +
+ + +#### Default variations + +`_repo.https://github.com/pytorch/pytorch` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-transformers-from-src/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,python3 + * Skip this dependenecy only if all ENV vars are set:
+
`{'CM_CONDA_ENV': ['yes']}`
+     * CM names: `--adr.['python', 'python3']...`
+       - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3)
+     * get,pytorch,from.src
+       * Skip this dependency only if all ENV vars are set:
+
`{'CM_CONDA_ENV': ['yes']}`
+     * CM names: `--adr.['pytorch']...`
+       - CM script: [install-pytorch-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pytorch-from-src)
+     * get,git,repo
+     * CM names: `--adr.['transformers-src-repo']...`
+       - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo)
+  1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-transformers-from-src/customize.py)***
+  1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-transformers-from-src/_cm.json)
+  1. ***Run native script if exists***
+     * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-transformers-from-src/run.sh)
+  1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-transformers-from-src/_cm.json)
+  1. Run "postprocess" function from customize.py
+  1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-transformers-from-src/_cm.json) + +___ +### Script output +`cmr "install src from.src transformers src-transformers [,variations]" -j` +#### New environment keys (filter) + +* `CM_TRANSFORMERS_*` +#### New environment keys auto-detected from customize diff --git a/docs/Dashboard-automation/publish-results-to-dashboard.md b/docs/Dashboard-automation/publish-results-to-dashboard.md new file mode 100644 index 0000000000..d599850093 --- /dev/null +++ b/docs/Dashboard-automation/publish-results-to-dashboard.md @@ -0,0 +1,123 @@ +Automatically generated README for this automation recipe: **publish-results-to-dashboard** + +Category: **Dashboard automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=publish-results-to-dashboard,4af3a2d09f14412b) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/publish-results-to-dashboard)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *publish-results,dashboard* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "publish-results dashboard" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=publish-results,dashboard` + +`cm run script --tags=publish-results,dashboard ` + +*or* + +`cmr "publish-results dashboard"` + +`cmr "publish-results dashboard " ` + + +#### Run this script from Python + +
+
Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'publish-results,dashboard',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+   print (r['error'])
+
+```
+
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="publish-results,dashboard"``` + +#### Run this script via Docker (beta) + +`cm docker script "publish-results dashboard" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/publish-results-to-dashboard/_cm.json)*** + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_wandb + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/publish-results-to-dashboard/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/publish-results-to-dashboard/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/publish-results-to-dashboard/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/publish-results-to-dashboard/_cm.json) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/publish-results-to-dashboard/_cm.json) + +___ +### Script output +`cmr "publish-results dashboard " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-android-sdk.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-android-sdk.md new file mode 100644 index 0000000000..45d8ea2db8 --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-android-sdk.md @@ -0,0 +1,151 @@ +Automatically generated README for this automation recipe: **get-android-sdk** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-android-sdk,8c5b4b83d49c441a) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-android-sdk)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,android,sdk,android-sdk* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get android sdk android-sdk" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,android,sdk,android-sdk` + +`cm run script --tags=get,android,sdk,android-sdk [--input_flags]` + +*or* + +`cmr "get android sdk android-sdk"` + +`cmr "get android sdk android-sdk " [--input_flags]` + + +#### Run this script from Python + +
+
Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,android,sdk,android-sdk',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+   print (r['error'])
+
+```
+
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,android,sdk,android-sdk"``` + +#### Run this script via Docker (beta) + +`cm docker script "get android sdk android-sdk" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--android_cmake_version=value` → `CM_ANDROID_CMAKE_VERSION=value` +* `--android_ndk_version=value` → `CM_ANDROID_NDK_VERSION=value` +* `--android_version=value` → `CM_ANDROID_VERSION=value` +* `--build_tools_version=value` → `CM_ANDROID_BUILD_TOOLS_VERSION=value` +* `--cmdline_tools_version=value` → `CM_ANDROID_CMDLINE_TOOLS_VERSION=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "android_cmake_version":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_ANDROID_BUILD_TOOLS_VERSION: `29.0.3` +* CM_ANDROID_CMAKE_VERSION: `3.6.4111459` +* CM_ANDROID_CMDLINE_TOOLS_URL: `https://dl.google.com/android/repository/commandlinetools-${CM_ANDROID_CMDLINE_TOOLS_OS}-${CM_ANDROID_CMDLINE_TOOLS_VERSION}_latest.zip` +* CM_ANDROID_CMDLINE_TOOLS_VERSION: `9123335` +* CM_ANDROID_NDK_VERSION: `21.3.6528147` +* CM_ANDROID_VERSION: `30` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-android-sdk/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,java + - CM script: [get-java](https://github.com/mlcommons/cm4mlops/tree/master/script/get-java) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-android-sdk/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-android-sdk/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-android-sdk/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-android-sdk/_cm.json) + +___ +### Script output +`cmr "get android sdk android-sdk " [--input_flags] -j` +#### New environment keys (filter) + +* `+PATH` +* `ANDROID_HOME` +* `ANDROID_NDK_HOME` +* `CM_ANDROID_HOME` +#### New environment keys auto-detected from customize + +* `CM_ANDROID_HOME` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-aria2.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-aria2.md new file mode 100644 index 0000000000..daf2c012d5 --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-aria2.md @@ -0,0 +1,148 @@ +Automatically generated README for this automation recipe: **get-aria2** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=get-aria2,d83419a90a0c40d0) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aria2)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *get,aria2,get-aria2* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get aria2 get-aria2" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,aria2,get-aria2` + +`cm run script --tags=get,aria2,get-aria2 [--input_flags]` + +*or* + +`cmr "get aria2 get-aria2"` + +`cmr "get aria2 get-aria2 " [--input_flags]` + + +#### Run this script from Python + +
+
Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,aria2,get-aria2',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+   print (r['error'])
+
+```
+
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,aria2,get-aria2"``` + +#### Run this script via Docker (beta) + +`cm docker script "get aria2 get-aria2" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--install=value` → `CM_FORCE_INSTALL=value` +* `--src=value` → `CM_ARIA2_BUILD_FROM_SRC=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "install":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aria2/_cm.yaml)*** + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aria2/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aria2/_cm.yaml) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aria2/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aria2/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aria2/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aria2/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aria2/_cm.yaml) + +___ +### Script output +`cmr "get aria2 get-aria2 " [--input_flags] -j` +#### New environment keys (filter) + +* `+PATH` +* `CM_ARIA2_*` +#### New environment keys auto-detected from customize + +* `CM_ARIA2_BIN_WITH_PATH` +* `CM_ARIA2_DOWNLOAD_DIR` +* `CM_ARIA2_DOWNLOAD_FILE` +* `CM_ARIA2_DOWNLOAD_FILE2` +* `CM_ARIA2_DOWNLOAD_URL` +* `CM_ARIA2_INSTALLED_PATH` +* `CM_ARIA2_INSTALLED_TO_CACHE` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-bazel.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-bazel.md new file mode 100644 index 0000000000..d10d39230f --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-bazel.md @@ -0,0 +1,127 @@ +Automatically generated README for this automation recipe: **get-bazel** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-bazel,eaef0be38bac493c) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bazel)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,bazel,get-bazel* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get bazel get-bazel" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,bazel,get-bazel` + +`cm run script --tags=get,bazel,get-bazel ` + +*or* + +`cmr "get bazel get-bazel"` + +`cmr "get bazel get-bazel " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,bazel,get-bazel', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,bazel,get-bazel"``` + +#### Run this script via Docker (beta) + +`cm docker script "get bazel get-bazel" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bazel/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bazel/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bazel/_cm.json)*** + * install,bazel + * Enable this dependency only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + - CM script: [install-bazel](https://github.com/mlcommons/cm4mlops/tree/master/script/install-bazel) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bazel/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bazel/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bazel/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bazel/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bazel/_cm.json) + +___ +### Script output +`cmr "get bazel get-bazel " -j` +#### New environment keys (filter) + +* `+PATH` +* `CM_BAZEL_*` +#### New environment keys auto-detected from customize + +* `CM_BAZEL_CACHE_TAGS` +* `CM_BAZEL_INSTALLED_PATH` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-blis.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-blis.md new file mode 100644 index 0000000000..dad973bb28 --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-blis.md @@ -0,0 +1,158 @@ +Automatically generated README for this automation recipe: **get-blis** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-blis,ea6e1cf75242456c) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-blis)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,lib,blis* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get lib blis" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,lib,blis` + +`cm run script --tags=get,lib,blis[,variations] ` + +*or* + +`cmr "get lib blis"` + +`cmr "get lib blis [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,lib,blis', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,lib,blis"``` + +#### Run this script via Docker (beta) + +`cm docker script "get lib blis[variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**source**" +
+ Click here to expand this section. + + * `_amd` + - Workflow: + * **`_flame`** (default) + - Workflow: + +
+ + +#### Default variations + +`_flame` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `master` + +* `0.9.0` +* `master` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-blis/_cm.json)*** + * get,git + * CM names: `--adr.['blis-source-repo']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-blis/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-blis/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-blis/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-blis/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-blis/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-blis/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-blis/_cm.json) + +___ +### Script output +`cmr "get lib blis [,variations]" -j` +#### New environment keys (filter) + +* `+LD_LIBRARY_PATH` +* `CM_BLIS_INC` +* `CM_BLIS_INSTALL_PATH` +* `CM_BLIS_LIB` +* `CM_BLIS_SRC_PATH` +#### New environment keys auto-detected from customize + +* `CM_BLIS_INC` +* `CM_BLIS_INSTALL_PATH` +* `CM_BLIS_LIB` +* `CM_BLIS_SRC_PATH` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-brew.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-brew.md new file mode 100644 index 0000000000..7bd857bdbf --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-brew.md @@ -0,0 +1,117 @@ +Automatically generated README for this automation recipe: **get-brew** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-brew,4a2c5eab1ccf484f) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-brew)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,brew* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get brew" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,brew` + +`cm run script --tags=get,brew ` + +*or* + +`cmr "get brew"` + +`cmr "get brew " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,brew', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,brew"``` + +#### Run this script via Docker (beta) + +`cm docker script "get brew" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-brew/_cm.json) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-brew/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-brew/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-brew/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-brew/_cm.json) + +___ +### Script output +`cmr "get brew " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-cmake.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-cmake.md new file mode 100644 index 0000000000..cc5c63cd74 --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-cmake.md @@ -0,0 +1,130 @@ +Automatically generated README for this automation recipe: **get-cmake** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-cmake,52bf974d791b4fc8) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmake)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to 
find and reuse this script (see in above meta description): *get,cmake,get-cmake* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get cmake get-cmake" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,cmake,get-cmake` + +`cm run script --tags=get,cmake,get-cmake ` + +*or* + +`cmr "get cmake get-cmake"` + +`cmr "get cmake get-cmake " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,cmake,get-cmake', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,cmake,get-cmake"``` + +#### Run this script via Docker (beta) + +`cm docker script "get cmake get-cmake" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmake/_cm.json)*** + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmake/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmake/_cm.json)*** + * install,cmake,prebuilt + * Enable this dependency only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + - CM script: [install-cmake-prebuilt](https://github.com/mlcommons/cm4mlops/tree/master/script/install-cmake-prebuilt) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmake/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmake/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmake/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmake/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmake/_cm.json) + +___ +### Script output +`cmr "get cmake get-cmake " -j` +#### New environment keys (filter) + +* `+PATH` +* `CM_CMAKE_*` +* `CM_MAKE_CORES` +#### New environment keys auto-detected from customize + +* `CM_CMAKE_CACHE_TAGS` +* `CM_MAKE_CORES` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-cmsis_5.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-cmsis_5.md new file mode 100644 index 0000000000..6e90b97e00 --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-cmsis_5.md @@ -0,0 +1,149 @@ +Automatically generated README for this automation recipe: **get-cmsis_5** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-cmsis_5,2258c212b11443f5) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* 
GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmsis_5)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,cmsis,cmsis_5,arm-software* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get cmsis cmsis_5 arm-software" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,cmsis,cmsis_5,arm-software` + +`cm run script --tags=get,cmsis,cmsis_5,arm-software[,variations] ` + +*or* + +`cmr "get cmsis cmsis_5 arm-software"` + +`cmr "get cmsis cmsis_5 arm-software [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,cmsis,cmsis_5,arm-software', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,cmsis,cmsis_5,arm-software"``` + +#### Run this script via Docker (beta) + +`cm docker script "get cmsis cmsis_5 arm-software[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_recurse-submodules` + - Environment variables: + - *CM_GIT_RECURSE_SUBMODULES*: `--recurse-submodules` + - Workflow: + * `_short-history` + - Environment variables: + - *CM_GIT_DEPTH*: `--depth 10` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_GIT_DEPTH: `` +* CM_GIT_PATCH: `no` +* CM_GIT_URL: `https://github.com/ARM-software/CMSIS_5.git` + +
+ +#### Versions +Default version: `custom` + +* `custom` +* `develop` +* `master` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmsis_5/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmsis_5/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmsis_5/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmsis_5/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmsis_5/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmsis_5/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmsis_5/_cm.json) + +___ +### Script output +`cmr "get cmsis cmsis_5 arm-software [,variations]" -j` +#### New environment keys (filter) + +* `CMSIS*` +#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-docker.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-docker.md new file mode 100644 index 0000000000..a836ce6de1 --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-docker.md @@ -0,0 +1,119 @@ +Automatically generated README for this automation recipe: **get-docker** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-docker,6192accce4234084) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-docker)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,install,docker,engine* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get install docker engine" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,install,docker,engine` + +`cm run script --tags=get,install,docker,engine ` + +*or* + +`cmr "get install docker engine"` + +`cmr "get install docker engine " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,install,docker,engine', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,install,docker,engine"``` + +#### Run this script via Docker (beta) + +`cm docker script "get install docker engine" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-docker/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-docker/_cm.json) + 1. ***Run native script if exists*** + * [run-ubuntu.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-docker/run-ubuntu.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-docker/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-docker/_cm.json) + +___ +### Script output +`cmr "get install docker engine " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-generic-sys-util.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-generic-sys-util.md new file mode 100644 index 0000000000..ef0ece7f96 --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-generic-sys-util.md @@ -0,0 +1,227 @@ +Automatically generated README for this automation recipe: **get-generic-sys-util** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-generic-sys-util,bb0393afa8404a11) ]* + +--- +#### Summary + +* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-sys-util)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,sys-util,generic,generic-sys-util* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get sys-util generic generic-sys-util" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,sys-util,generic,generic-sys-util` + +`cm run script --tags=get,sys-util,generic,generic-sys-util[,variations] ` + +*or* + +`cmr "get sys-util generic generic-sys-util"` + +`cmr "get sys-util generic generic-sys-util [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,sys-util,generic,generic-sys-util', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,sys-util,generic,generic-sys-util"``` + +#### Run this script via Docker (beta) + +`cm docker script "get sys-util generic generic-sys-util[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_g++-12` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `g++12` + - Workflow: + * `_gflags-dev` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `gflags-dev` + - Workflow: + * `_git-lfs` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `git-lfs` + - Workflow: + * `_glog-dev` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `glog-dev` + - Workflow: + * `_libboost-all-dev` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `libboost-all-dev` + - Workflow: + * `_libffi7` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `libffi7` + - Workflow: + * `_libgmock-dev` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `libgmock-dev` + - Workflow: + * `_libmpfr-dev` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `libmpfr-dev` + - Workflow: + * `_libnuma-dev` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `libnuma-dev` + - Workflow: + * `_libpci-dev` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `libpci-dev` + - Workflow: + * `_libre2-dev` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `libre2-dev` + - Workflow: + * `_libudev-dev` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `libudev-dev` + - Workflow: + * `_ninja-build` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `ninja-build` + - Workflow: + * `_ntpdate` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `ntpdate` + - Workflow: + * `_numactl` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `numactl` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * install,numactl,from.src + * Enable this dependency only if all ENV vars are set:
+`{'CM_HOST_OS_FLAVOR': ['rhel'], 'CM_HOST_OS_VERSION': ['9.1', '9.2', '9.3']}` + - CM script: [install-numactl-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-numactl-from-src) + * `_nvidia-cuda-toolkit` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `nvidia-cuda-toolkit` + - Workflow: + * `_rapidjson-dev` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `rapidjson-dev` + - Workflow: + * `_rsync` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `rsync` + - Workflow: + * `_screen` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `screen` + - Workflow: + * `_sox` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `sox` + - Workflow: + * `_transmission` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `transmission` + - Workflow: + * `_zlib` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `zlib` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_CLEAN_DIRS: `bin` +* CM_SUDO: `sudo` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-sys-util/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-sys-util/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-sys-util/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-sys-util/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-sys-util/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-sys-util/_cm.json) + +___ +### Script output +`cmr "get sys-util generic generic-sys-util [,variations]" -j` +#### New environment keys (filter) + +* `+PATH` +#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-google-test.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-google-test.md new file mode 100644 index 0000000000..1859603832 --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-google-test.md @@ -0,0 +1,137 @@ +Automatically generated README for this automation recipe: **get-google-test** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=get-google-test,02945138a5614253) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-test)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,google-test,googletest,gtest,test,google* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get google-test googletest gtest test google" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,google-test,googletest,gtest,test,google` + +`cm run script --tags=get,google-test,googletest,gtest,test,google ` + +*or* + +`cmr "get google-test googletest gtest test google"` + +`cmr "get google-test googletest gtest test google " ` + + +#### Run this script from Python + +
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,google-test,googletest,gtest,test,google',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,google-test,googletest,gtest,test,google"``` + +#### Run this script via Docker (beta) + +`cm docker script "get google-test googletest gtest test google" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `1.14.0` + +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-test/_cm.json)*** + * get,cmake + * CM names: `--adr.['cmake']...` + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + * get,compiler + * CM names: `--adr.['compiler']...` + - CM script: [get-cl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cl) + - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) + - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-test/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-test/_cm.json)*** + * get,git,repo,_repo.https://github.com/google/googletest.git + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-test/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-test/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-test/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-test/_cm.json) + +___ +### Script output +`cmr "get google-test googletest gtest test google " -j` +#### New environment keys (filter) + +* `+C_INCLUDE_PATH` +* `+LD_LIBRARY_PATH` +* `CM_GOOGLE_TEST_INSTALL_PATH` +* `CM_GOOGLE_TEST_SRC_PATH` +#### New environment keys auto-detected from customize + +* `CM_GOOGLE_TEST_INSTALL_PATH` +* `CM_GOOGLE_TEST_SRC_PATH` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-java.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-java.md new file mode 100644 index 0000000000..ae31b39ca6 --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-java.md @@ -0,0 +1,165 @@ +Automatically generated README for this automation recipe: **get-java** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-java,9399d0e785704f8c) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-java)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,java* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get java" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,java` + +`cm run script --tags=get,java[,variations] [--input_flags]` + +*or* + +`cmr "get java"` + +`cmr "get java [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,java',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,java"``` + +#### Run this script via Docker (beta) + +`cm docker script "get java[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_install` + - Environment variables: + - *CM_JAVA_PREBUILT_INSTALL*: `on` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+<summary>Click here to expand this section.</summary>
+
+* `--install=value` → `CM_JAVA_PREBUILT_INSTALL=value`
+
+**Above CLI flags can be used in the Python CM API as follows:**
+
+```python
+r=cm.access({... , "install":...})
+```
+
+</details>
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_JAVA_PREBUILT_VERSION: `19` +* CM_JAVA_PREBUILT_BUILD: `36` +* CM_JAVA_PREBUILT_URL: `https://download.java.net/openjdk/jdk${CM_JAVA_PREBUILT_VERSION}/ri/` +* CM_JAVA_PREBUILT_FILENAME: `openjdk-${CM_JAVA_PREBUILT_VERSION}+${CM_JAVA_PREBUILT_BUILD}_${CM_JAVA_PREBUILT_HOST_OS}-x64_bin` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-java/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-java/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-java/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-java/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-java/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-java/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-java/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-java/_cm.json) + +___ +### Script output +`cmr "get java [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `+PATH` +* `CM_JAVA_*` +* `JAVA_HOME` +#### New environment keys auto-detected from customize + +* `CM_JAVA_BIN` +* `CM_JAVA_CACHE_TAGS` +* `CM_JAVA_PREBUILT_EXT` +* `CM_JAVA_PREBUILT_FILENAME` +* `CM_JAVA_PREBUILT_HOST_OS` +* `CM_JAVA_PREBUILT_URL` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-javac.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-javac.md new file mode 100644 index 0000000000..fbb21c1234 --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-javac.md @@ -0,0 +1,168 @@ +Automatically generated README for this automation recipe: **get-javac** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-javac,509280c497b24226) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-javac)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,javac* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get javac" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,javac` + +`cm run script --tags=get,javac[,variations] [--input_flags]` + +*or* + +`cmr "get javac"` + +`cmr "get javac [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,javac',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,javac"``` + +#### Run this script via Docker (beta) + +`cm docker script "get javac[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_install` + - Environment variables: + - *CM_JAVAC_PREBUILT_INSTALL*: `on` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+<summary>Click here to expand this section.</summary>
+
+* `--install=value` → `CM_JAVAC_PREBUILT_INSTALL=value`
+
+**Above CLI flags can be used in the Python CM API as follows:**
+
+```python
+r=cm.access({... , "install":...})
+```
+
+</details>
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_JAVAC_PREBUILT_VERSION: `19` +* CM_JAVAC_PREBUILT_BUILD: `36` +* CM_JAVAC_PREBUILT_URL: `https://download.java.net/openjdk/jdk${CM_JAVAC_PREBUILT_VERSION}/ri/` +* CM_JAVAC_PREBUILT_FILENAME: `openjdk-${CM_JAVAC_PREBUILT_VERSION}+${CM_JAVAC_PREBUILT_BUILD}_${CM_JAVAC_PREBUILT_HOST_OS}-x64_bin` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-javac/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-javac/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-javac/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-javac/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-javac/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-javac/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-javac/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-javac/_cm.json) + +___ +### Script output +`cmr "get javac [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `+PATH` +* `CM_JAVAC_*` +* `CM_JAVA_*` +* `JAVA_HOME` +#### New environment keys auto-detected from customize + +* `CM_JAVAC_BIN` +* `CM_JAVAC_CACHE_TAGS` +* `CM_JAVAC_PREBUILT_EXT` +* `CM_JAVAC_PREBUILT_FILENAME` +* `CM_JAVAC_PREBUILT_HOST_OS` +* `CM_JAVAC_PREBUILT_URL` +* `CM_JAVA_BIN` +* `CM_JAVA_BIN_WITH_PATH` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-armnn.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-armnn.md new file mode 100644 index 0000000000..745886d070 --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-armnn.md @@ -0,0 +1,132 @@ +Automatically generated README for this automation recipe: **get-lib-armnn** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-lib-armnn,9603a2e90fd44587) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-armnn)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,lib-armnn,lib,armnn* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get lib-armnn lib armnn" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,lib-armnn,lib,armnn` + +`cm run script --tags=get,lib-armnn,lib,armnn ` + +*or* + +`cmr "get lib-armnn lib armnn"` + +`cmr "get lib-armnn lib armnn " ` + + +#### Run this script from Python + +
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,lib-armnn,lib,armnn',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,lib-armnn,lib,armnn"``` + +#### Run this script via Docker (beta) + +`cm docker script "get lib-armnn lib armnn" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `23.11` + +* `22.11` +* `23.05` +* `23.11` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-armnn/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-armnn/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-armnn/_cm.json)*** + * get,git,repo,_repo.https://github.com/ARM-software/armnn + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-armnn/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-armnn/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-armnn/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-armnn/_cm.json) + +___ +### Script output +`cmr "get lib-armnn lib armnn " -j` +#### New environment keys (filter) + +* `+CPLUS_INCLUDE_PATH` +* `+C_INCLUDE_PATH` +* `+LD_LIBRARY_PATH` +* `CM_LIB_ARMNN_VERSION` +* `CM_LIB_DNNL_*` +#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-dnnl.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-dnnl.md new file mode 100644 index 0000000000..e12b39926b --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-dnnl.md @@ -0,0 +1,132 @@ +Automatically generated README for this automation recipe: **get-lib-dnnl** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-lib-dnnl,1cd35a6a3b0b4530) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-dnnl)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,lib-dnnl,lib,dnnl* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get lib-dnnl lib dnnl" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,lib-dnnl,lib,dnnl` + +`cm run script --tags=get,lib-dnnl,lib,dnnl ` + +*or* + +`cmr "get lib-dnnl lib dnnl"` + +`cmr "get lib-dnnl lib dnnl " ` + + +#### Run this script from Python + +
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,lib-dnnl,lib,dnnl',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,lib-dnnl,lib,dnnl"``` + +#### Run this script via Docker (beta) + +`cm docker script "get lib-dnnl lib dnnl" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `dev` + +* `2.2.4` +* `dev` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-dnnl/_cm.json)*** + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * cmake,get-cmake + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-dnnl/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-dnnl/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-dnnl/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-dnnl/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-dnnl/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-dnnl/_cm.json) + +___ +### Script output +`cmr "get lib-dnnl lib dnnl " -j` +#### New environment keys (filter) + +* `+CPLUS_INCLUDE_PATH` +* `+C_INCLUDE_PATH` +* `+LD_LIBRARY_PATH` +* `CM_LIB_DNNL_*` +#### New environment keys auto-detected from customize + +* `CM_LIB_DNNL_INSTALL_DIR` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-protobuf.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-protobuf.md new file mode 100644 index 0000000000..80608d46db --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-protobuf.md @@ -0,0 +1,154 @@ +Automatically generated README for this automation recipe: **get-lib-protobuf** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-lib-protobuf,db45f1eb73934f91) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-protobuf)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,google-protobuf,protobuf,lib,lib-protobuf,google* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get google-protobuf protobuf lib lib-protobuf google" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,google-protobuf,protobuf,lib,lib-protobuf,google` + +`cm run script --tags=get,google-protobuf,protobuf,lib,lib-protobuf,google[,variations] ` + +*or* + +`cmr "get google-protobuf protobuf lib lib-protobuf google"` + +`cmr "get google-protobuf protobuf lib lib-protobuf google [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,google-protobuf,protobuf,lib,lib-protobuf,google',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,google-protobuf,protobuf,lib,lib-protobuf,google"``` + +#### Run this script via Docker (beta) + +`cm docker script "get google-protobuf protobuf lib lib-protobuf google[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_branch.#` + - Environment variables: + - *CM_TMP_GIT_CHECKOUT*: `#` + - Workflow: + * `_tag.#` + - Environment variables: + - *CM_GIT_CHECKOUT_TAG*: `#` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `1.13.0` + +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-protobuf/_cm.json)*** + * get,cmake + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + * get,gcc + - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-protobuf/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-protobuf/_cm.json)*** + * get,git,repo,_repo.https://github.com/google/protobuf.git + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-protobuf/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-protobuf/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-protobuf/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-protobuf/_cm.json) + +___ +### Script output +`cmr "get google-protobuf protobuf lib lib-protobuf google [,variations]" -j` +#### New environment keys (filter) + +* `+CPLUS_INCLUDE_PATH` +* `+C_INCLUDE_PATH` +* `+LD_LIBRARY_PATH` +* `CM_GOOGLE_PROTOBUF_INSTALL_PATH` +* `CM_GOOGLE_PROTOBUF_SRC_PATH` +#### New environment keys auto-detected from customize + +* `CM_GOOGLE_PROTOBUF_INSTALL_PATH` +* `CM_GOOGLE_PROTOBUF_SRC_PATH` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-qaic-api.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-qaic-api.md new file mode 100644 index 0000000000..243dc586ff --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-qaic-api.md @@ -0,0 +1,131 @@ +Automatically generated README for this automation recipe: **get-lib-qaic-api** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-lib-qaic-api,1e253ae184e44f23) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-qaic-api)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,api,lib-qaic-api,lib,qaic* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get api lib-qaic-api lib qaic" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,api,lib-qaic-api,lib,qaic` + +`cm run script --tags=get,api,lib-qaic-api,lib,qaic ` + +*or* + +`cmr "get api lib-qaic-api lib qaic"` + +`cmr "get api lib-qaic-api lib qaic " ` + + +#### Run this script from Python + +
+<details>
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,api,lib-qaic-api,lib,qaic',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+   print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,api,lib-qaic-api,lib,qaic"``` + +#### Run this script via Docker (beta) + +`cm docker script "get api lib-qaic-api lib qaic" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `master` + +* `master` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-qaic-api/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-qaic-api/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-qaic-api/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-qaic-api/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-qaic-api/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-qaic-api/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-qaic-api/_cm.json) + +___ +### Script output +`cmr "get api lib-qaic-api lib qaic " -j` +#### New environment keys (filter) + +* `+CPLUS_INCLUDE_PATH` +* `+C_INCLUDE_PATH` +* `+LD_LIBRARY_PATH` +* `CM_LIB_QAIC_*` +* `CM_QAIC_API_*` +#### New environment keys auto-detected from customize + +* `CM_QAIC_API_INC_FILE` +* `CM_QAIC_API_SRC_FILE` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-nvidia-docker.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-nvidia-docker.md new file mode 100644 index 0000000000..284218740d --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-nvidia-docker.md @@ -0,0 +1,121 @@ +Automatically generated README for this automation recipe: **get-nvidia-docker** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-nvidia-docker,465ae240998e4779) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-docker)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,install,nvidia,nvidia-container-toolkit,nvidia-docker,engine* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get install nvidia nvidia-container-toolkit nvidia-docker engine" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,install,nvidia,nvidia-container-toolkit,nvidia-docker,engine` + +`cm run script --tags=get,install,nvidia,nvidia-container-toolkit,nvidia-docker,engine ` + +*or* + +`cmr "get install nvidia nvidia-container-toolkit nvidia-docker engine"` + +`cmr "get install nvidia nvidia-container-toolkit nvidia-docker engine " ` + + +#### Run this script from Python + +
+<details>
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,install,nvidia,nvidia-container-toolkit,nvidia-docker,engine',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+   print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,install,nvidia,nvidia-container-toolkit,nvidia-docker,engine"``` + +#### Run this script via Docker (beta) + +`cm docker script "get install nvidia nvidia-container-toolkit nvidia-docker engine" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-docker/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,docker + - CM script: [get-docker](https://github.com/mlcommons/cm4mlops/tree/master/script/get-docker) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-docker/_cm.json) + 1. ***Run native script if exists*** + * [run-ubuntu.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-docker/run-ubuntu.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-docker/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-docker/_cm.json) + +___ +### Script output +`cmr "get install nvidia nvidia-container-toolkit nvidia-docker engine " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-openssl.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-openssl.md new file mode 100644 index 0000000000..a348f12728 --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-openssl.md @@ -0,0 +1,125 @@ +Automatically generated README for this automation recipe: **get-openssl** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=get-openssl,febdae70e9e64e30) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-openssl)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,openssl,lib,lib-openssl* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get openssl lib lib-openssl" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,openssl,lib,lib-openssl` + +`cm run script --tags=get,openssl,lib,lib-openssl ` + +*or* + +`cmr "get openssl lib lib-openssl"` + +`cmr "get openssl lib lib-openssl " ` + + +#### Run this script from Python + +
+<details>
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,openssl,lib,lib-openssl',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+   print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,openssl,lib,lib-openssl"``` + +#### Run this script via Docker (beta) + +`cm docker script "get openssl lib lib-openssl" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-openssl/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-openssl/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-openssl/_cm.json)*** + * install,openssl + * Enable this dependency only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + - CM script: [install-openssl](https://github.com/mlcommons/cm4mlops/tree/master/script/install-openssl) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-openssl/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-openssl/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-openssl/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-openssl/_cm.json) + +___ +### Script output +`cmr "get openssl lib lib-openssl " -j` +#### New environment keys (filter) + +* `+LD_LIBRARY_PATH` +* `CM_OPENSSL_*` +#### New environment keys auto-detected from customize + +* `CM_OPENSSL_INSTALLED_PATH` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-rclone.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-rclone.md new file mode 100644 index 0000000000..6dcd4adda1 --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-rclone.md @@ -0,0 +1,150 @@ +Automatically generated README for this automation recipe: **get-rclone** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-rclone,22ffb43c49c9419e) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rclone)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* 
All CM tags to find and reuse this script (see in above meta description): *get,rclone* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get rclone" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,rclone` + +`cm run script --tags=get,rclone[,variations] ` + +*or* + +`cmr "get rclone"` + +`cmr "get rclone [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+<details>
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,rclone',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+   print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,rclone"``` + +#### Run this script via Docker (beta) + +`cm docker script "get rclone[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_gdrive` + - Environment variables: + - *CM_RCLONE_GDRIVE*: `yes` + - Workflow: + * `_system` + - Environment variables: + - *CM_RCLONE_SYSTEM*: `yes` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `1.65.2` + +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rclone/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rclone/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rclone/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rclone/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rclone/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rclone/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rclone/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rclone/_cm.json) + +___ +### Script output +`cmr "get rclone [,variations]" -j` +#### New environment keys (filter) + +* `+PATH` +* `CM_RCLONE_BIN_WITH_PATH` +* `CM_RCLONE_CACHE_TAGS` +* `CM_RCLONE_VERSION` +#### New environment keys auto-detected from customize + +* `CM_RCLONE_BIN_WITH_PATH` +* `CM_RCLONE_CACHE_TAGS` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-sys-utils-cm.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-sys-utils-cm.md new file mode 100644 index 0000000000..143e07b7d6 --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-sys-utils-cm.md @@ -0,0 +1,156 @@ +Automatically generated README for this automation recipe: **get-sys-utils-cm** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-sys-utils-cm,bc90993277e84b8e) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *get,sys-utils-cm* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get sys-utils-cm" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,sys-utils-cm` + +`cm run script --tags=get,sys-utils-cm[,variations] [--input_flags]` + +*or* + +`cmr "get sys-utils-cm"` + +`cmr "get sys-utils-cm [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+<details>
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,sys-utils-cm',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+   print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,sys-utils-cm"``` + +#### Run this script via Docker (beta) + +`cm docker script "get sys-utils-cm[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_user` + - Environment variables: + - *CM_PYTHON_PIP_USER*: `--user` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--skip=value` → `CM_SKIP_SYS_UTILS=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "skip":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm/_cm.yaml) + 1. ***Run native script if exists*** + * [run-arch.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm/run-arch.sh) + * [run-debian.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm/run-debian.sh) + * [run-macos.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm/run-macos.sh) + * [run-rhel.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm/run-rhel.sh) + * [run-sles.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm/run-sles.sh) + * [run-ubuntu.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm/run-ubuntu.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm/_cm.yaml) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm/_cm.yaml) + +___ +### Script output +`cmr "get sys-utils-cm [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `+PATH` +#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-sys-utils-min.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-sys-utils-min.md new file mode 100644 index 0000000000..c200ad0d08 --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-sys-utils-min.md @@ -0,0 +1,117 @@ +Automatically generated README for this automation recipe: **get-sys-utils-min** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-sys-utils-min,a9af7714d3d94779) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-min)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,sys-utils-min* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get sys-utils-min" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,sys-utils-min` + +`cm run script --tags=get,sys-utils-min ` + +*or* + +`cmr "get sys-utils-min"` + +`cmr "get sys-utils-min " ` + + +#### Run this script from Python + +
+<details>
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,sys-utils-min',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+   print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,sys-utils-min"``` + +#### Run this script via Docker (beta) + +`cm docker script "get sys-utils-min" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-min/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-min/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-min/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-min/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-min/_cm.json) + +___ +### Script output +`cmr "get sys-utils-min " -j` +#### New environment keys (filter) + +* `+PATH` +#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-xilinx-sdk.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-xilinx-sdk.md new file mode 100644 index 0000000000..aa2f7dba5a --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-xilinx-sdk.md @@ -0,0 +1,138 @@ +Automatically generated README for this automation recipe: **get-xilinx-sdk** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-xilinx-sdk,76d4d1bd09df4490) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-xilinx-sdk)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,xilinx,sdk* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get xilinx sdk" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,xilinx,sdk` + +`cm run script --tags=get,xilinx,sdk [--input_flags]` + +*or* + +`cmr "get xilinx sdk"` + +`cmr "get xilinx sdk " [--input_flags]` + + +#### Run this script from Python + +
+<details>
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,xilinx,sdk',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+   print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,xilinx,sdk"``` + +#### Run this script via Docker (beta) + +`cm docker script "get xilinx sdk" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--input=value` → `CM_XILINX_SDK_FILE_PATH=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "input":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `2019.1` + +* `2019.1` +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-xilinx-sdk/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-xilinx-sdk/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-xilinx-sdk/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-xilinx-sdk/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-xilinx-sdk/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-xilinx-sdk/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-xilinx-sdk/_cm.json) + +___ +### Script output +`cmr "get xilinx sdk " [--input_flags] -j` +#### New environment keys (filter) + +* `+PATH` +* `CM_XILINX_*` +#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-zendnn.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-zendnn.md new file mode 100644 index 0000000000..acf66bf023 --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-zendnn.md @@ -0,0 +1,127 @@ +Automatically generated README for this automation recipe: **get-zendnn** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=get-zendnn,d1c6feb0ee684b09) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zendnn)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,zendnn,amd,from.src* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get zendnn amd from.src" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,zendnn,amd,from.src` + +`cm run script --tags=get,zendnn,amd,from.src ` + +*or* + +`cmr "get zendnn amd from.src"` + +`cmr "get zendnn amd from.src " ` + + +#### Run this script from Python + +
+<details>
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,zendnn,amd,from.src',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+   print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,zendnn,amd,from.src"``` + +#### Run this script via Docker (beta) + +`cm docker script "get zendnn amd from.src" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zendnn/_cm.json)*** + * get,amd,aocl + * CM names: `--adr.['aocl']...` + - CM script: [get-aocl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-aocl) + * get,lib,blis,_amd + - CM script: [get-blis](https://github.com/mlcommons/cm4mlops/tree/master/script/get-blis) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,git,_repo.https://github.com/amd/ZenDNN.git + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zendnn/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zendnn/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zendnn/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zendnn/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zendnn/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zendnn/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zendnn/_cm.json) + +___ +### Script output +`cmr "get zendnn amd from.src " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/install-bazel.md b/docs/Detection-or-installation-of-tools-and-artifacts/install-bazel.md new file mode 100644 index 0000000000..8787010eff --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/install-bazel.md @@ -0,0 +1,134 @@ +Automatically generated README for this automation recipe: **install-bazel** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-bazel,dfd3d2bf5b764175) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-bazel)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,script,bazel* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install script bazel" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,script,bazel` + +`cm run script --tags=install,script,bazel ` + +*or* + +`cmr "install script bazel"` + +`cmr "install script bazel " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'install,script,bazel' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,script,bazel"``` + +#### Run this script via Docker (beta) + +`cm docker script "install script bazel" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `7.0.2` + +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-bazel/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-bazel/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-bazel/_cm.json) + 1. ***Run native script if exists*** + * [run-aarch64.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-bazel/run-aarch64.sh) + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-bazel/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-bazel/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-bazel/_cm.json) + 1. Run "postprocess" function from customize.py + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-bazel/_cm.json)*** + * get,bazel + * Skip this dependency only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + - CM script: [get-bazel](https://github.com/mlcommons/cm4mlops/tree/master/script/get-bazel) + +___ +### Script output +`cmr "install script bazel " -j` +#### New environment keys (filter) + +* `CM_BAZEL_*` +#### New environment keys auto-detected from customize + +* `CM_BAZEL_BIN_WITH_PATH` +* `CM_BAZEL_DOWNLOAD_FILE` +* `CM_BAZEL_DOWNLOAD_URL` +* `CM_BAZEL_INSTALLED_PATH` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/install-cmake-prebuilt.md b/docs/Detection-or-installation-of-tools-and-artifacts/install-cmake-prebuilt.md new file mode 100644 index 0000000000..af835b71b1 --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/install-cmake-prebuilt.md @@ -0,0 +1,136 @@ +Automatically generated README for this automation recipe: **install-cmake-prebuilt** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-cmake-prebuilt,5a39ef05992b4103) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cmake-prebuilt)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,prebuilt,cmake,prebuilt-cmake,install-prebuilt-cmake* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install prebuilt cmake prebuilt-cmake install-prebuilt-cmake" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,prebuilt,cmake,prebuilt-cmake,install-prebuilt-cmake` + +`cm run script --tags=install,prebuilt,cmake,prebuilt-cmake,install-prebuilt-cmake ` + +*or* + +`cmr "install prebuilt cmake prebuilt-cmake install-prebuilt-cmake"` + +`cmr "install prebuilt cmake prebuilt-cmake install-prebuilt-cmake " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'install,prebuilt,cmake,prebuilt-cmake,install-prebuilt-cmake' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,prebuilt,cmake,prebuilt-cmake,install-prebuilt-cmake"``` + +#### Run this script via Docker (beta) + +`cm docker script "install prebuilt cmake prebuilt-cmake install-prebuilt-cmake" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `3.28.3` + +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cmake-prebuilt/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cmake-prebuilt/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cmake-prebuilt/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cmake-prebuilt/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cmake-prebuilt/_cm.json) + 1. Run "postprocess" function from customize.py + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cmake-prebuilt/_cm.json)*** + * get,cmake + * Skip this dependency only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + +___ +### Script output +`cmr "install prebuilt cmake prebuilt-cmake install-prebuilt-cmake " -j` +#### New environment keys (filter) + +* `+C_INCLUDE_PATH` +* `+LD_LIBRARY_PATH` +* `+PATH` +* `CM_CMAKE_*` +* `CM_GET_DEPENDENT_CACHED_PATH` +#### New environment keys auto-detected from customize + +* `CM_CMAKE_BIN_WITH_PATH` +* `CM_CMAKE_INSTALLED_PATH` +* `CM_CMAKE_PACKAGE` +* `CM_GET_DEPENDENT_CACHED_PATH` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/install-gflags.md b/docs/Detection-or-installation-of-tools-and-artifacts/install-gflags.md new file mode 100644 index 0000000000..dc4ab3c751 --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/install-gflags.md @@ -0,0 +1,127 @@ +Automatically generated README for this automation recipe: **install-gflags** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-gflags,10bb562c29ea459e) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gflags)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,src,get,gflags* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install src get gflags" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,src,get,gflags` + +`cm run script --tags=install,src,get,gflags ` + +*or* + +`cmr "install src get gflags"` + +`cmr "install src get gflags " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'install,src,get,gflags' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,src,get,gflags"``` + +#### Run this script via Docker (beta) + +`cm docker script "install src get gflags" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `2.2.2` + +* `2.2.2` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gflags/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,cmake + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gflags/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gflags/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gflags/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gflags/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gflags/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gflags/_cm.json) + +___ +### Script output +`cmr "install src get gflags " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/install-github-cli.md b/docs/Detection-or-installation-of-tools-and-artifacts/install-github-cli.md new file mode 100644 index 0000000000..65cfb01cb6 --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/install-github-cli.md @@ -0,0 +1,121 @@ +Automatically generated README for this automation recipe: **install-github-cli** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-github-cli,cd948ec309344bf8) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-github-cli)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,gh,github,cli,github-cli* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install gh github cli github-cli" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,gh,github,cli,github-cli` + +`cm run script --tags=install,gh,github,cli,github-cli ` + +*or* + +`cmr "install gh github cli github-cli"` + +`cmr "install gh github cli github-cli " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'install,gh,github,cli,github-cli' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,gh,github,cli,github-cli"``` + +#### Run this script via Docker (beta) + +`cm docker script "install gh github cli github-cli" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-github-cli/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-github-cli/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-github-cli/_cm.json) + 1. ***Run native script if exists*** + * [run-macos.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-github-cli/run-macos.sh) + * [run-rhel.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-github-cli/run-rhel.sh) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-github-cli/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-github-cli/_cm.json) + 1. Run "postprocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-github-cli/_cm.json) + +___ +### Script output +`cmr "install gh github cli github-cli " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/install-numactl-from-src.md b/docs/Detection-or-installation-of-tools-and-artifacts/install-numactl-from-src.md new file mode 100644 index 0000000000..dd69d6889a --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/install-numactl-from-src.md @@ -0,0 +1,170 @@ +Automatically generated README for this automation recipe: **install-numactl-from-src** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-numactl-from-src,4f355ae8ca1948b2) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-numactl-from-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,src,from.src,numactl,src-numactl* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install src from.src numactl src-numactl" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,src,from.src,numactl,src-numactl` + +`cm run script --tags=install,src,from.src,numactl,src-numactl[,variations] ` + +*or* + +`cmr "install src from.src numactl src-numactl"` + +`cmr "install src from.src numactl src-numactl [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Getting Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'install,src,from.src,numactl,src-numactl' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,src,from.src,numactl,src-numactl"``` + +#### Run this script via Docker (beta) + +`cm docker script "install src from.src numactl src-numactl[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_branch.#` + - Environment variables: + - *CM_GIT_CHECKOUT*: `#` + - Workflow: + * `_sha.#` + - Environment variables: + - *CM_GIT_CHECKOUT_SHA*: `#` + - Workflow: + * `_tag.#` + - Environment variables: + - *CM_GIT_CHECKOUT_TAG*: `#` + - Workflow: + +
+ + + * Group "**repo**" +
+ Click here to expand this section. + + * `_repo.#` + - Environment variables: + - *CM_GIT_URL*: `#` + - Workflow: + * **`_repo.https://github.com/numactl/numactl`** (default) + - Environment variables: + - *CM_GIT_URL*: `https://github.com/numactl/numactl` + - Workflow: + +
+ + +#### Default variations + +`_repo.https://github.com/numactl/numactl` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-numactl-from-src/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,git,repo + * CM names: `--adr.['numactl-src-repo']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-numactl-from-src/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-numactl-from-src/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-numactl-from-src/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-numactl-from-src/_cm.json) + 1. Run "postprocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-numactl-from-src/_cm.json) + +___ +### Script output +`cmr "install src from.src numactl src-numactl [,variations]" -j` +#### New environment keys (filter) + +* `+PATH` +* `CM_NUMACTL_*` +#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/install-openssl.md b/docs/Detection-or-installation-of-tools-and-artifacts/install-openssl.md new file mode 100644 index 0000000000..7aec6efced --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/install-openssl.md @@ -0,0 +1,134 @@ +Automatically generated README for this automation recipe: **install-openssl** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-openssl,be472d3b1d014169) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-openssl)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,src,openssl,openssl-lib* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install src openssl openssl-lib" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,src,openssl,openssl-lib` + +`cm run script --tags=install,src,openssl,openssl-lib ` + +*or* + +`cmr "install src openssl openssl-lib"` + +`cmr "install src openssl openssl-lib " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'install,src,openssl,openssl-lib' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,src,openssl,openssl-lib"``` + +#### Run this script via Docker (beta) + +`cm docker script "install src openssl openssl-lib" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `1.1.1` + +* `1.1.1` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-openssl/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-openssl/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-openssl/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-openssl/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-openssl/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-openssl/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-openssl/_cm.json)*** + * get,openssl + * Skip this dependency only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + - CM script: [get-openssl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-openssl) + +___ +### Script output +`cmr "install src openssl openssl-lib " -j` +#### New environment keys (filter) + +* `+LD_LIBRARY_PATH` +* `CM_OPENSSL_*` +#### New environment keys auto-detected from customize + +* `CM_OPENSSL_BIN_WITH_PATH` +* `CM_OPENSSL_INSTALLED_PATH` \ No newline at end of file diff --git a/docs/DevOps-automation/benchmark-program.md b/docs/DevOps-automation/benchmark-program.md new file mode 100644 index 0000000000..cd0bbeba32 --- /dev/null +++ b/docs/DevOps-automation/benchmark-program.md @@ -0,0 +1,151 @@ +Automatically generated README for this automation recipe: **benchmark-program** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=benchmark-program,19f369ef47084895) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *benchmark,program* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "benchmark program" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=benchmark,program` + +`cm run script --tags=benchmark,program[,variations] ` + +*or* + +`cmr "benchmark program"` + +`cmr "benchmark program [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Getting Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'benchmark,program',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="benchmark,program"``` + +#### Run this script via Docker (beta) + +`cm docker script "benchmark program[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_numactl` + - Workflow: + * `_numactl-interleave` + - Workflow: + * `_profile` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,profiler + - *Warning: no scripts found* + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_ENABLE_NUMACTL: `0` +* CM_ENABLE_PROFILING: `0` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program/_cm.json)*** + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * set,performance,mode,_performance + * Enable this dependency only if all ENV vars are set:
+`{'CM_SET_PERFORMANCE_MODE': ['on', 'yes', 'True', True]}` + - CM script: [set-performance-mode](https://github.com/mlcommons/cm4mlops/tree/master/script/set-performance-mode) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program/_cm.json) + 1. ***Run native script if exists*** + * [run-ubuntu.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program/run-ubuntu.sh) + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program/_cm.json) + +___ +### Script output +`cmr "benchmark program [,variations]" -j` +#### New environment keys (filter) + +* `CM_RUN_CMD` +#### New environment keys auto-detected from customize + +* `CM_RUN_CMD` \ No newline at end of file diff --git a/docs/DevOps-automation/compile-program.md b/docs/DevOps-automation/compile-program.md new file mode 100644 index 0000000000..057fdfe98d --- /dev/null +++ b/docs/DevOps-automation/compile-program.md @@ -0,0 +1,128 @@ +Automatically generated README for this automation recipe: **compile-program** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=compile-program,c05042ba005a4bfa) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-program)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *compile,program,c-program,cpp-program,compile-program,compile-c-program,compile-cpp-program* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "compile program c-program cpp-program compile-program compile-c-program compile-cpp-program" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=compile,program,c-program,cpp-program,compile-program,compile-c-program,compile-cpp-program` + +`cm run script --tags=compile,program,c-program,cpp-program,compile-program,compile-c-program,compile-cpp-program ` + +*or* + +`cmr "compile program c-program cpp-program compile-program compile-c-program compile-cpp-program"` + +`cmr "compile program c-program cpp-program compile-program compile-c-program compile-cpp-program " ` + + +#### Run this script from Python + +
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'compile,program,c-program,cpp-program,compile-program,compile-c-program,compile-cpp-program',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="compile,program,c-program,cpp-program,compile-program,compile-c-program,compile-cpp-program"``` + +#### Run this script via Docker (beta) + +`cm docker script "compile program c-program cpp-program compile-program compile-c-program compile-cpp-program" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* SKIP_RECOMPILE: `no` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-program/_cm.json)*** + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,compiler + * CM names: `--adr.['compiler']...` + - CM script: [get-cl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cl) + - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) + - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) + * get,compiler-flags + - CM script: [get-compiler-flags](https://github.com/mlcommons/cm4mlops/tree/master/script/get-compiler-flags) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-program/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-program/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-program/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-program/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-program/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-program/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-program/_cm.json) + +___ +### Script output +`cmr "compile program c-program cpp-program compile-program compile-c-program compile-cpp-program " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/DevOps-automation/convert-csv-to-md.md b/docs/DevOps-automation/convert-csv-to-md.md new file mode 100644 index 0000000000..129d8588ba --- /dev/null +++ b/docs/DevOps-automation/convert-csv-to-md.md @@ -0,0 +1,143 @@ +Automatically generated README for this automation recipe: **convert-csv-to-md** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=convert-csv-to-md,200a95b80bee4a25) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-csv-to-md)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *csv-to-md,convert,to-md,from-csv* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "csv-to-md convert to-md from-csv" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=csv-to-md,convert,to-md,from-csv` + +`cm run script --tags=csv-to-md,convert,to-md,from-csv [--input_flags]` + +*or* + +`cmr "csv-to-md convert to-md from-csv"` + +`cmr "csv-to-md convert to-md from-csv " [--input_flags]` + + +#### Run this script from Python + +
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'csv-to-md,convert,to-md,from-csv',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="csv-to-md,convert,to-md,from-csv"``` + +#### Run this script via Docker (beta) + +`cm docker script "csv-to-md convert to-md from-csv" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--csv_file=value` → `CM_CSV_FILE=value` +* `--md_file=value` → `CM_MD_FILE=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "csv_file":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-csv-to-md/_cm.json)*** + * get,python3 + * CM names: `--adr.['python, python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_pandas + * CM names: `--adr.['pandas']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.tabulate + * CM names: `--adr.['tabulate']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-csv-to-md/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-csv-to-md/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-csv-to-md/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-csv-to-md/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-csv-to-md/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-csv-to-md/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-csv-to-md/_cm.json) + +___ +### Script output +`cmr "csv-to-md convert to-md from-csv " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/DevOps-automation/copy-to-clipboard.md b/docs/DevOps-automation/copy-to-clipboard.md new file mode 100644 index 0000000000..7122a24c07 --- /dev/null +++ b/docs/DevOps-automation/copy-to-clipboard.md @@ -0,0 +1,141 @@ +Automatically generated README for this automation recipe: **copy-to-clipboard** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=copy-to-clipboard,8b3aaa97ce58474d) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/copy-to-clipboard)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *copy,to,clipboard,copy-to-clipboard* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "copy to clipboard copy-to-clipboard" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=copy,to,clipboard,copy-to-clipboard` + +`cm run script --tags=copy,to,clipboard,copy-to-clipboard [--input_flags]` + +*or* + +`cmr "copy to clipboard copy-to-clipboard"` + +`cmr "copy to clipboard copy-to-clipboard " [--input_flags]` + + +#### Run this script from Python + +
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'copy,to,clipboard,copy-to-clipboard',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="copy,to,clipboard,copy-to-clipboard"``` + +#### Run this script via Docker (beta) + +`cm docker script "copy to clipboard copy-to-clipboard" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--add_quotes=value` → `CM_COPY_TO_CLIPBOARD_TEXT_ADD_QUOTES=value` +* `--q=value` → `CM_COPY_TO_CLIPBOARD_TEXT_ADD_QUOTES=value` +* `--t=value` → `CM_COPY_TO_CLIPBOARD_TEXT=value` +* `--text=value` → `CM_COPY_TO_CLIPBOARD_TEXT=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "add_quotes":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/copy-to-clipboard/_cm.yaml)*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_package.pyperclip + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/copy-to-clipboard/_cm.yaml) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/copy-to-clipboard/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/copy-to-clipboard/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/copy-to-clipboard/_cm.yaml) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/copy-to-clipboard/_cm.yaml) + +___ +### Script output +`cmr "copy to clipboard copy-to-clipboard " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/DevOps-automation/create-conda-env.md b/docs/DevOps-automation/create-conda-env.md new file mode 100644 index 0000000000..f97bb0f5a3 --- /dev/null +++ b/docs/DevOps-automation/create-conda-env.md @@ -0,0 +1,148 @@ +Automatically generated README for this automation recipe: **create-conda-env** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=create-conda-env,e39e0b04c86a40f2) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-conda-env)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *create,get,env,conda-env,conda-environment,create-conda-environment* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "create get env conda-env conda-environment create-conda-environment" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=create,get,env,conda-env,conda-environment,create-conda-environment` + +`cm run script --tags=create,get,env,conda-env,conda-environment,create-conda-environment[,variations] ` + +*or* + +`cmr "create get env conda-env conda-environment create-conda-environment"` + +`cmr "create get env conda-env conda-environment create-conda-environment [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'create,get,env,conda-env,conda-environment,create-conda-environment',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="create,get,env,conda-env,conda-environment,create-conda-environment"``` + +#### Run this script via Docker (beta) + +`cm docker script "create get env conda-env conda-environment create-conda-environment[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_name.#` + - Environment variables: + - *CM_CONDA_ENV_NAME*: `#` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-conda-env/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,conda + * CM names: `--adr.['conda']...` + - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-conda-env/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-conda-env/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-conda-env/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-conda-env/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-conda-env/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-conda-env/_cm.json) + +___ +### Script output +`cmr "create get env conda-env conda-environment create-conda-environment [,variations]" -j` +#### New environment keys (filter) + +* `+LD_LIBRARY_PATH` +* `+PATH` +* `CM_CONDA_BIN_PATH` +* `CM_CONDA_LIB_PATH` +* `CM_CONDA_PREFIX` +* `CONDA_PREFIX` +#### New environment keys auto-detected from customize + +* `CM_CONDA_BIN_PATH` +* `CM_CONDA_LIB_PATH` +* `CM_CONDA_PREFIX` \ No newline at end of file diff --git a/docs/DevOps-automation/create-patch.md b/docs/DevOps-automation/create-patch.md new file mode 100644 index 0000000000..664c378f54 --- /dev/null +++ b/docs/DevOps-automation/create-patch.md @@ -0,0 +1,135 @@ +Automatically generated README for this automation recipe: **create-patch** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=create-patch,0659dc1f75664c65) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-patch)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *create,patch* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "create patch" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=create,patch` + +`cm run script --tags=create,patch [--input_flags]` + +*or* + +`cmr "create patch"` + +`cmr "create patch " [--input_flags]` + + +#### Run this script from Python + +
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'create,patch',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="create,patch"``` + +#### Run this script via Docker (beta) + +`cm docker script "create patch" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--exclude=value` → `CM_CREATE_PATCH_EXCLUDE=value` +* `--new=value` → `CM_CREATE_PATCH_NEW=value` +* `--old=value` → `CM_CREATE_PATCH_OLD=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "exclude":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-patch/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-patch/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-patch/_cm.yaml) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-patch/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-patch/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-patch/_cm.yaml) + +___ +### Script output +`cmr "create patch " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/DevOps-automation/detect-sudo.md b/docs/DevOps-automation/detect-sudo.md new file mode 100644 index 0000000000..49e48b530a --- /dev/null +++ b/docs/DevOps-automation/detect-sudo.md @@ -0,0 +1,120 @@ +Automatically generated README for this automation recipe: **detect-sudo** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=detect-sudo,1d47ffc556e248dc) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-sudo)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *detect,sudo,access* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "detect sudo access" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=detect,sudo,access` + +`cm run script --tags=detect,sudo,access ` + +*or* + +`cmr "detect sudo access"` + +`cmr "detect sudo access " ` + + +#### Run this script from Python + +
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'detect,sudo,access',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="detect,sudo,access"``` + +#### Run this script via Docker (beta) + +`cm docker script "detect sudo access" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-sudo/_cm.yaml) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-sudo/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-sudo/_cm.yaml) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-sudo/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-sudo/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-sudo/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-sudo/_cm.yaml) + +___ +### Script output +`cmr "detect sudo access " -j` +#### New environment keys (filter) + +* `CM_SUDO_*` +#### New environment keys auto-detected from customize + +* `CM_SUDO_USER` \ No newline at end of file diff --git a/docs/DevOps-automation/download-and-extract.md b/docs/DevOps-automation/download-and-extract.md new file mode 100644 index 0000000000..1d802285ed --- /dev/null +++ b/docs/DevOps-automation/download-and-extract.md @@ -0,0 +1,216 @@ +Automatically generated README for this automation recipe: **download-and-extract** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=download-and-extract,c67e81a4ce2649f5) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-and-extract)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *download-and-extract,file* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "download-and-extract file" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=download-and-extract,file` + +`cm run script --tags=download-and-extract,file[,variations] [--input_flags]` + +*or* + +`cmr "download-and-extract file"` + +`cmr "download-and-extract file [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'download-and-extract,file' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="download-and-extract,file"``` + +#### Run this script via Docker (beta) + +`cm docker script "download-and-extract file[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_extract` + - Environment variables: + - *CM_DAE_EXTRACT_DOWNLOADED*: `yes` + - Workflow: + * `_keep` + - Environment variables: + - *CM_EXTRACT_REMOVE_EXTRACTED*: `no` + - Workflow: + * `_no-remove-extracted` + - Environment variables: + - *CM_EXTRACT_REMOVE_EXTRACTED*: `no` + - Workflow: + * `_url.#` + - Environment variables: + - *CM_DAE_URL*: `#` + - Workflow: + +
+ + + * Group "**download-tool**" +
+ Click here to expand this section. + + * **`_cmutil`** (default) + - Workflow: + * `_curl` + - Workflow: + * `_gdown` + - Workflow: + * `_rclone` + - Workflow: + * `_torrent` + - Environment variables: + - *CM_DAE_DOWNLOAD_USING_TORRENT*: `yes` + - *CM_TORRENT_DOWNLOADED_FILE_NAME*: `<<>>` + - *CM_TORRENT_DOWNLOADED_PATH_ENV_KEY*: `CM_DAE_FILEPATH` + - *CM_TORRENT_WAIT_UNTIL_COMPLETED*: `yes` + - Workflow: + 1. ***Read "prehook_deps" on other CM scripts*** + * download,torrent + - CM script: [download-torrent](https://github.com/mlcommons/cm4mlops/tree/master/script/download-torrent) + * `_wget` + - Workflow: + +
+ + +#### Default variations + +`_cmutil` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--download_path=value` → `CM_DOWNLOAD_PATH=value` +* `--extra_folder=value` → `CM_EXTRACT_TO_FOLDER=value` +* `--extract_path=value` → `CM_EXTRACT_PATH=value` +* `--from=value` → `CM_DOWNLOAD_LOCAL_FILE_PATH=value` +* `--local_path=value` → `CM_DOWNLOAD_LOCAL_FILE_PATH=value` +* `--store=value` → `CM_DOWNLOAD_PATH=value` +* `--to=value` → `CM_EXTRACT_PATH=value` +* `--url=value` → `CM_DAE_URL=value` +* `--verify=value` → `CM_VERIFY_SSL=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "download_path":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-and-extract/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-and-extract/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-and-extract/_cm.json)*** + * download,file + * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_DAE_DOWNLOAD_USING_TORRENT': ['yes', 'True']}` + * CM names: `--adr.['download-script']...` + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + 1. ***Run native script if exists*** + 1. ***Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-and-extract/_cm.json)*** + * extract,file + * Enable this dependency only if all ENV vars are set:
+`{'CM_DAE_EXTRACT_DOWNLOADED': ['yes', 'True']}` + * CM names: `--adr.['extract-script']...` + - CM script: [extract-file](https://github.com/mlcommons/cm4mlops/tree/master/script/extract-file) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-and-extract/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-and-extract/_cm.json) + +___ +### Script output +`cmr "download-and-extract file [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `<<>>` +* `<<>>` +* `CM_DOWNLOAD_DOWNLOADED_PATH*` +* `CM_EXTRACT_EXTRACTED_PATH` +* `CM_GET_DEPENDENT_CACHED_PATH` +#### New environment keys auto-detected from customize + +* `CM_GET_DEPENDENT_CACHED_PATH` \ No newline at end of file diff --git a/docs/DevOps-automation/download-file.md b/docs/DevOps-automation/download-file.md new file mode 100644 index 0000000000..73df26d096 --- /dev/null +++ b/docs/DevOps-automation/download-file.md @@ -0,0 +1,202 @@ +Automatically generated README for this automation recipe: **download-file** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=download-file,9cdc8dc41aae437e) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-file)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *download,file* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "download file" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=download,file` + +`cm run script --tags=download,file[,variations] [--input_flags]` + +*or* + +`cmr "download file"` + +`cmr "download file [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'download,file' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="download,file"``` + +#### Run this script via Docker (beta) + +`cm docker script "download file[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_url.#` + - Environment variables: + - *CM_DOWNLOAD_URL*: `#` + - Workflow: + +
+ + + * Group "**download-tool**" +
+ Click here to expand this section. + + * **`_cmutil`** (default) + - Environment variables: + - *CM_DOWNLOAD_TOOL*: `cmutil` + - Workflow: + * `_curl` + - Environment variables: + - *CM_DOWNLOAD_TOOL*: `curl` + - Workflow: + * `_gdown` + - Environment variables: + - *CM_DOWNLOAD_TOOL*: `gdown` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_package.gdown + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_rclone` + - Environment variables: + - *CM_DOWNLOAD_TOOL*: `rclone` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,rclone + - CM script: [get-rclone](https://github.com/mlcommons/cm4mlops/tree/master/script/get-rclone) + * `_wget` + - Environment variables: + - *CM_DOWNLOAD_TOOL*: `wget` + - Workflow: + +
+ + +#### Default variations + +`_cmutil` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--download_path=value` → `CM_DOWNLOAD_PATH=value` +* `--from=value` → `CM_DOWNLOAD_LOCAL_FILE_PATH=value` +* `--local_path=value` → `CM_DOWNLOAD_LOCAL_FILE_PATH=value` +* `--md5sum=value` → `CM_DOWNLOAD_CHECKSUM=value` +* `--store=value` → `CM_DOWNLOAD_PATH=value` +* `--url=value` → `CM_DOWNLOAD_URL=value` +* `--verify=value` → `CM_VERIFY_SSL=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "download_path":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_RCLONE_COPY_USING: `sync` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-file/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-file/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-file/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-file/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-file/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-file/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-file/customize.py)*** + 1. <br>
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-file/_cm.json) + +___ +### Script output +`cmr "download file [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `<<>>` +* `CM_DOWNLOAD_DOWNLOADED_PATH` +* `CM_GET_DEPENDENT_CACHED_PATH` +#### New environment keys auto-detected from customize + +* `CM_DOWNLOAD_DOWNLOADED_PATH` +* `CM_GET_DEPENDENT_CACHED_PATH` \ No newline at end of file diff --git a/docs/DevOps-automation/download-torrent.md b/docs/DevOps-automation/download-torrent.md new file mode 100644 index 0000000000..e14037e1d8 --- /dev/null +++ b/docs/DevOps-automation/download-torrent.md @@ -0,0 +1,155 @@ +Automatically generated README for this automation recipe: **download-torrent** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=download-torrent,69b752c5618e45bb) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-torrent)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *download,torrent,download-torrent* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "download torrent download-torrent" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=download,torrent,download-torrent` + +`cm run script --tags=download,torrent,download-torrent[,variations] [--input_flags]` + +*or* + +`cmr "download torrent download-torrent"` + +`cmr "download torrent download-torrent [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'download,torrent,download-torrent' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="download,torrent,download-torrent"``` + +#### Run this script via Docker (beta) + +`cm docker script "download torrent download-torrent[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_torrent.#` + - Environment variables: + - *CM_TORRENT_FILE*: `#` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--wait=value` → `CM_TORRENT_WAIT_UNTIL_COMPLETED=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "wait":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_TORRENT_WAIT_UNTIL_COMPLETED: `no` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-torrent/_cm.json)*** + * get,generic-sys-util,_transmission + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-torrent/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-torrent/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-torrent/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-torrent/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-torrent/customize.py)*** + 1. <br>
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-torrent/_cm.json) + +___ +### Script output +`cmr "download torrent download-torrent [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `<<>>` +* `CM_TORRENT_DOWNLOADED_PATH` +#### New environment keys auto-detected from customize + +* `CM_TORRENT_DOWNLOADED_PATH` \ No newline at end of file diff --git a/docs/DevOps-automation/extract-file.md b/docs/DevOps-automation/extract-file.md new file mode 100644 index 0000000000..a9df0d22e3 --- /dev/null +++ b/docs/DevOps-automation/extract-file.md @@ -0,0 +1,168 @@ +Automatically generated README for this automation recipe: **extract-file** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=extract-file,3f0b76219d004817) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/extract-file)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *extract,file* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "extract file" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=extract,file` + +`cm run script --tags=extract,file[,variations] [--input_flags]` + +*or* + +`cmr "extract file"` + +`cmr "extract file [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'extract,file' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="extract,file"``` + +#### Run this script via Docker (beta) + +`cm docker script "extract file[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_keep` + - Environment variables: + - *CM_EXTRACT_REMOVE_EXTRACTED*: `no` + - Workflow: + * `_no-remove-extracted` + - Environment variables: + - *CM_EXTRACT_REMOVE_EXTRACTED*: `no` + - Workflow: + * `_path.#` + - Environment variables: + - *CM_EXTRACT_FILEPATH*: `#` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--extra_folder=value` → `CM_EXTRACT_TO_FOLDER=value` +* `--extract_path=value` → `CM_EXTRACT_PATH=value` +* `--input=value` → `CM_EXTRACT_FILEPATH=value` +* `--to=value` → `CM_EXTRACT_PATH=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "extra_folder":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/extract-file/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/extract-file/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/extract-file/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/extract-file/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/extract-file/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/extract-file/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/extract-file/customize.py)*** + 1. <br>
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/extract-file/_cm.json) + +___ +### Script output +`cmr "extract file [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `<<>>` +* `CM_EXTRACT_EXTRACTED_PATH` +* `CM_GET_DEPENDENT_CACHED_PATH` +#### New environment keys auto-detected from customize + +* `CM_EXTRACT_EXTRACTED_PATH` +* `CM_GET_DEPENDENT_CACHED_PATH` \ No newline at end of file diff --git a/docs/DevOps-automation/fail.md b/docs/DevOps-automation/fail.md new file mode 100644 index 0000000000..6784dbba73 --- /dev/null +++ b/docs/DevOps-automation/fail.md @@ -0,0 +1,132 @@ +Automatically generated README for this automation recipe: **fail** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=fail,3aaee82e19d243cd) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/fail)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *fail,filter* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "fail filter" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=fail,filter` + +`cm run script --tags=fail,filter[,variations] ` + +*or* + +`cmr "fail filter"` + +`cmr "fail filter [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'fail,filter' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="fail,filter"``` + +#### Run this script via Docker (beta) + +`cm docker script "fail filter[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_windows` + - Environment variables: + - *CM_FAIL_WINDOWS*: `True` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/fail/_cm.yaml) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/fail/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/fail/_cm.yaml) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/fail/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/fail/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/fail/_cm.yaml) + +___ +### Script output +`cmr "fail filter [,variations]" -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/DevOps-automation/get-conda.md b/docs/DevOps-automation/get-conda.md new file mode 100644 index 0000000000..6999e6a00d --- /dev/null +++ b/docs/DevOps-automation/get-conda.md @@ -0,0 +1,164 @@ +Automatically generated README for this automation recipe: **get-conda** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-conda,6600115f41324c7b) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-conda)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta 
description): *get,conda,get-conda* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get conda get-conda" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,conda,get-conda` + +`cm run script --tags=get,conda,get-conda[,variations] ` + +*or* + +`cmr "get conda get-conda"` + +`cmr "get conda get-conda [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'get,conda,get-conda' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,conda,get-conda"``` + +#### Run this script via Docker (beta) + +`cm docker script "get conda get-conda[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_name.#` + - Environment variables: + - *CM_CONDA_PREFIX_NAME*: `#` + - Workflow: + +
+ + + * Group "**conda-python**" +
+ Click here to expand this section. + + * `_python-3.#` + - Environment variables: + - *CM_CONDA_PYTHON_VERSION*: `3.#` + - Workflow: + * `_python-3.8` + - Environment variables: + - *CM_CONDA_PYTHON_VERSION*: `3.8` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-conda/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-conda/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-conda/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-conda/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-conda/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-conda/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-conda/customize.py)*** + 1. <br>
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-conda/_cm.json) + +___ +### Script output +`cmr "get conda get-conda [,variations]" -j` +#### New environment keys (filter) + +* `+LD_LIBRARY_PATH` +* `+PATH` +* `CM_CONDA_BIN_PATH` +* `CM_CONDA_BIN_WITH_PATH` +* `CM_CONDA_LIB_PATH` +* `CM_CONDA_PREFIX` +* `CONDA_PREFIX` +#### New environment keys auto-detected from customize + +* `CM_CONDA_BIN_PATH` +* `CM_CONDA_BIN_WITH_PATH` +* `CM_CONDA_LIB_PATH` +* `CM_CONDA_PREFIX` \ No newline at end of file diff --git a/docs/DevOps-automation/get-git-repo.md b/docs/DevOps-automation/get-git-repo.md new file mode 100644 index 0000000000..17ecb4b5bc --- /dev/null +++ b/docs/DevOps-automation/get-git-repo.md @@ -0,0 +1,240 @@ +Automatically generated README for this automation recipe: **get-git-repo** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-git-repo,ed603e7292974f10) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-git-repo)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,git,repo,repository,clone* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get git repo repository clone" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,git,repo,repository,clone` + +`cm run script --tags=get,git,repo,repository,clone[,variations] [--input_flags]` + +*or* + +`cmr "get git repo repository clone"` + +`cmr "get git repo repository clone [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'get,git,repo,repository,clone' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,git,repo,repository,clone"``` + +#### Run this script via Docker (beta) + +`cm docker script "get git repo repository clone[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_lfs` + - Environment variables: + - *CM_GIT_REPO_NEEDS_LFS*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic,sys-util,_git-lfs + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * `_no-recurse-submodules` + - Environment variables: + - *CM_GIT_RECURSE_SUBMODULES*: `` + - Workflow: + * `_patch` + - Environment variables: + - *CM_GIT_PATCH*: `yes` + - Workflow: + * `_submodules.#` + - Environment variables: + - *CM_GIT_SUBMODULES*: `#` + - Workflow: + +
+ + + * Group "**checkout**" +
+ Click here to expand this section. + + * `_branch.#` + - Environment variables: + - *CM_GIT_BRANCH*: `#` + - Workflow: + * `_sha.#` + - Environment variables: + - *CM_GIT_SHA*: `#` + - Workflow: + * `_tag.#` + - Environment variables: + - *CM_GIT_CHECKOUT_TAG*: `#` + - Workflow: + +
+ + + * Group "**git-history**" +
+ Click here to expand this section. + + * `_full-history` + - Environment variables: + - *CM_GIT_DEPTH*: `` + - Workflow: + * **`_short-history`** (default) + - Environment variables: + - *CM_GIT_DEPTH*: `--depth 5` + - Workflow: + +
+ + + * Group "**repo**" +
+ Click here to expand this section. + + * `_repo.#` + - Environment variables: + - *CM_GIT_URL*: `#` + - Workflow: + +
+ + +#### Default variations + +`_short-history` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--branch=value` → `CM_GIT_CHECKOUT=value` +* `--depth=value` → `CM_GIT_DEPTH=value` +* `--env_key=value` → `CM_GIT_ENV_KEY=value` +* `--folder=value` → `CM_GIT_CHECKOUT_FOLDER=value` +* `--patch=value` → `CM_GIT_PATCH=value` +* `--submodules=value` → `CM_GIT_RECURSE_SUBMODULES=value` +* `--update=value` → `CM_GIT_REPO_PULL=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "branch":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_GIT_DEPTH: `--depth 4` +* CM_GIT_CHECKOUT_FOLDER: `repo` +* CM_GIT_PATCH: `no` +* CM_GIT_RECURSE_SUBMODULES: ` --recurse-submodules` +* CM_GIT_URL: `https://github.com/mlcommons/ck.git` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-git-repo/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-git-repo/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-git-repo/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-git-repo/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-git-repo/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-git-repo/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-git-repo/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-git-repo/_cm.json)*** + * pull,git,repo + * Enable this dependency only if all ENV vars are set:
+`{'CM_GIT_REPO_PULL': ['yes', 'True']}` + * CM names: `--adr.['pull-git-repo']...` + - CM script: [pull-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/pull-git-repo) + +___ +### Script output +`cmr "get git repo repository clone [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `<<>>` +* `CM_GIT_CHECKOUT_PATH` +* `CM_GIT_REPO_*` +#### New environment keys auto-detected from customize + +* `CM_GIT_CHECKOUT_PATH` +* `CM_GIT_REPO_CURRENT_HASH` \ No newline at end of file diff --git a/docs/DevOps-automation/get-github-cli.md b/docs/DevOps-automation/get-github-cli.md new file mode 100644 index 0000000000..fee40cc4ea --- /dev/null +++ b/docs/DevOps-automation/get-github-cli.md @@ -0,0 +1,120 @@ +Automatically generated README for this automation recipe: **get-github-cli** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-github-cli,1417029c6ff44f21) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-github-cli)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,gh,gh-cli,github,cli,github-cli* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get gh gh-cli github cli github-cli" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,gh,gh-cli,github,cli,github-cli` + +`cm run script --tags=get,gh,gh-cli,github,cli,github-cli ` + +*or* + +`cmr "get gh gh-cli github cli github-cli"` + +`cmr "get gh gh-cli github cli github-cli " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,gh,gh-cli,github,cli,github-cli', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,gh,gh-cli,github,cli,github-cli"``` + +#### Run this script via Docker (beta) + +`cm docker script "get gh gh-cli github cli github-cli" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-github-cli/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-github-cli/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-github-cli/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-github-cli/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-github-cli/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-github-cli/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-github-cli/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-github-cli/_cm.json) + +___ +### Script output +`cmr "get gh gh-cli github cli github-cli " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/DevOps-automation/pull-git-repo.md b/docs/DevOps-automation/pull-git-repo.md new file mode 100644 index 0000000000..63b1e3157c --- /dev/null +++ b/docs/DevOps-automation/pull-git-repo.md @@ -0,0 +1,134 @@ +Automatically generated README for this automation recipe: **pull-git-repo** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=pull-git-repo,c23132ed65c4421d) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/pull-git-repo)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *pull,git,repo,repository* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "pull git repo repository" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=pull,git,repo,repository` + +`cm run script --tags=pull,git,repo,repository [--input_flags]` + +*or* + +`cmr "pull git repo repository"` + +`cmr "pull git repo repository " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'pull,git,repo,repository', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="pull,git,repo,repository"``` + +#### Run this script via Docker (beta) + +`cm docker script "pull git repo repository" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--path=value` → `CM_GIT_CHECKOUT_PATH=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "path":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/pull-git-repo/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/pull-git-repo/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/pull-git-repo/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/pull-git-repo/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/pull-git-repo/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/pull-git-repo/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/pull-git-repo/_cm.json) + +___ +### Script output +`cmr "pull git repo repository " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/DevOps-automation/push-csv-to-spreadsheet.md b/docs/DevOps-automation/push-csv-to-spreadsheet.md new file mode 100644 index 0000000000..124332bbcc --- /dev/null +++ b/docs/DevOps-automation/push-csv-to-spreadsheet.md @@ -0,0 +1,142 @@ +Automatically generated README for this automation recipe: **push-csv-to-spreadsheet** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=push-csv-to-spreadsheet,5ec9e5fa7feb4fff) ]* + +--- +#### 
Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-csv-to-spreadsheet)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *push,google-spreadsheet,spreadsheet,push-to-google-spreadsheet* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "push google-spreadsheet spreadsheet push-to-google-spreadsheet" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=push,google-spreadsheet,spreadsheet,push-to-google-spreadsheet` + +`cm run script --tags=push,google-spreadsheet,spreadsheet,push-to-google-spreadsheet [--input_flags]` + +*or* + +`cmr "push google-spreadsheet spreadsheet push-to-google-spreadsheet"` + +`cmr "push google-spreadsheet spreadsheet push-to-google-spreadsheet " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'push,google-spreadsheet,spreadsheet,push-to-google-spreadsheet', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="push,google-spreadsheet,spreadsheet,push-to-google-spreadsheet"``` + +#### Run this script via Docker (beta) + +`cm docker script "push google-spreadsheet spreadsheet push-to-google-spreadsheet" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--csv_file=value` → `CM_CSV_FILE_PATH=value` +* `--sheet_name=value` → `CM_GOOGLE_SHEET_NAME=value` +* `--spreadsheet_id=value` → `CM_GOOGLE_SPREADSHEET_ID=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "csv_file":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_GOOGLE_SPREADSHEET_ID: `1gMHjXmFmwZR4-waPPyxy5Pc3VARqX3kKUWxkP97Xa6Y` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-csv-to-spreadsheet/_cm.json)*** + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_google-api-python-client + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_google-auth-oauthlib + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-csv-to-spreadsheet/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-csv-to-spreadsheet/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-csv-to-spreadsheet/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-csv-to-spreadsheet/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-csv-to-spreadsheet/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-csv-to-spreadsheet/_cm.json) + +___ +### Script output +`cmr "push google-spreadsheet spreadsheet push-to-google-spreadsheet " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/DevOps-automation/set-device-settings-qaic.md b/docs/DevOps-automation/set-device-settings-qaic.md new file mode 100644 index 0000000000..2033b7695c --- /dev/null +++ b/docs/DevOps-automation/set-device-settings-qaic.md @@ -0,0 +1,143 @@ +Automatically generated README for this automation recipe: **set-device-settings-qaic** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=set-device-settings-qaic,408a1a1563b44780) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-device-settings-qaic)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *set,device,qaic,ai100,cloud,performance,power,setting,mode,vc,ecc* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "set device qaic ai100 cloud performance power setting mode vc ecc" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=set,device,qaic,ai100,cloud,performance,power,setting,mode,vc,ecc` + +`cm run script --tags=set,device,qaic,ai100,cloud,performance,power,setting,mode,vc,ecc[,variations] ` + +*or* + +`cmr "set device qaic ai100 cloud performance power setting mode vc ecc"` + +`cmr "set device qaic ai100 cloud performance power setting mode vc ecc [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'set,device,qaic,ai100,cloud,performance,power,setting,mode,vc,ecc', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="set,device,qaic,ai100,cloud,performance,power,setting,mode,vc,ecc"``` + +#### Run this script via Docker (beta) + +`cm docker script "set device qaic ai100 cloud performance power setting mode vc ecc[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_ecc` + - Environment variables: + - *CM_QAIC_ECC*: `yes` + - Workflow: + * `_vc.#` + - Environment variables: + - *CM_QAIC_VC*: `#` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_QAIC_DEVICES: `0` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-device-settings-qaic/_cm.json)*** + * detect-os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,qaic,platform,sdk + - CM script: [get-qaic-platform-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-qaic-platform-sdk) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-device-settings-qaic/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-device-settings-qaic/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-device-settings-qaic/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-device-settings-qaic/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-device-settings-qaic/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-device-settings-qaic/_cm.json) + +___ +### Script output +`cmr "set device qaic ai100 cloud performance power setting mode vc ecc [,variations]" -j` +#### New environment keys (filter) + +* `CM_QAIC_DEVICE_*` +#### New environment keys auto-detected from customize diff --git a/docs/DevOps-automation/set-echo-off-win.md b/docs/DevOps-automation/set-echo-off-win.md new file mode 100644 index 0000000000..46e87495c0 --- /dev/null +++ b/docs/DevOps-automation/set-echo-off-win.md @@ -0,0 +1,116 @@ +Automatically generated README for this automation recipe: **set-echo-off-win** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=set-echo-off-win,49d94b57524f4fcf) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-echo-off-win)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *set,echo,off,win,echo-off-win,echo-off* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "set echo off win echo-off-win echo-off" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=set,echo,off,win,echo-off-win,echo-off` + +`cm run script --tags=set,echo,off,win,echo-off-win,echo-off ` + +*or* + +`cmr "set echo off win echo-off-win echo-off"` + +`cmr "set echo off win echo-off-win echo-off " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'set,echo,off,win,echo-off-win,echo-off', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="set,echo,off,win,echo-off-win,echo-off"``` + +#### Run this script via Docker (beta) + +`cm docker script "set echo off win echo-off-win echo-off" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-echo-off-win/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-echo-off-win/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-echo-off-win/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-echo-off-win/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-echo-off-win/_cm.json) + +___ +### Script output +`cmr "set echo off win echo-off-win echo-off " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/DevOps-automation/set-performance-mode.md b/docs/DevOps-automation/set-performance-mode.md new file mode 100644 index 0000000000..0fd389fe27 --- /dev/null +++ b/docs/DevOps-automation/set-performance-mode.md @@ -0,0 +1,180 @@ +Automatically generated README for this automation recipe: **set-performance-mode** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=set-performance-mode,2c0ab7b64692443d) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-performance-mode)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All 
CM tags to find and reuse this script (see in above meta description): *set,system,performance,power,mode* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "set system performance power mode" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=set,system,performance,power,mode` + +`cm run script --tags=set,system,performance,power,mode[,variations] ` + +*or* + +`cmr "set system performance power mode"` + +`cmr "set system performance power mode [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'set,system,performance,power,mode', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="set,system,performance,power,mode"``` + +#### Run this script via Docker (beta) + +`cm docker script "set system performance power mode[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_reproducibility` + - Environment variables: + - *CM_SET_OS_PERFORMANCE_REPRODUCIBILITY_MODE*: `yes` + - Workflow: + +
+ + + * Group "**device**" +
+ Click here to expand this section. + + * **`_cpu`** (default) + - Environment variables: + - *CM_SET_PERFORMANCE_MODE_OF*: `cpu` + - Workflow: + +
+ + + * Group "**performance-mode**" +
+ Click here to expand this section. + + * **`_performance`** (default) + - Environment variables: + - *CM_SET_PERFORMANCE_MODE*: `performance` + - Workflow: + +
+ + + * Group "**power**" +
+ Click here to expand this section. + + * `_power` + - Environment variables: + - *CM_SET_PERFORMANCE_MODE*: `power` + - Workflow: + +
+ + +#### Default variations + +`_cpu,_performance` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-performance-mode/_cm.json)*** + * detect-os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect-cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-performance-mode/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-performance-mode/_cm.json) + 1. ***Run native script if exists*** + * [run-ubuntu.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-performance-mode/run-ubuntu.sh) + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-performance-mode/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-performance-mode/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-performance-mode/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-performance-mode/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-performance-mode/_cm.json) + +___ +### Script output +`cmr "set system performance power mode [,variations]" -j` +#### New environment keys (filter) + +* `OMP_*` +#### New environment keys auto-detected from customize diff --git a/docs/DevOps-automation/set-sqlite-dir.md b/docs/DevOps-automation/set-sqlite-dir.md new file mode 100644 index 0000000000..ec6e144419 --- /dev/null +++ b/docs/DevOps-automation/set-sqlite-dir.md @@ -0,0 +1,141 @@ +Automatically generated README for this automation recipe: **set-sqlite-dir** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=set-sqlite-dir,05904966355a43ac) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-sqlite-dir)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *set,sqlite,dir,sqlite-dir* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "set sqlite dir sqlite-dir" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=set,sqlite,dir,sqlite-dir` + +`cm run script --tags=set,sqlite,dir,sqlite-dir [--input_flags]` + +*or* + +`cmr "set sqlite dir sqlite-dir"` + +`cmr "set sqlite dir sqlite-dir " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'set,sqlite,dir,sqlite-dir' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="set,sqlite,dir,sqlite-dir"``` + +#### Run this script via Docker (beta) + +`cm docker script "set sqlite dir sqlite-dir" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--path=value` → `CM_SQLITE_PATH=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "path":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+
+___
+### Dependencies on other CM scripts
+
+
+ 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-sqlite-dir/_cm.json)***
+ * detect,os
+ - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os)
+ * get,python3
+ * CM names: `--adr.['python', 'python3']...`
+ - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3)
+ 1. Run "preprocess" function from customize.py
+ 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-sqlite-dir/_cm.json)
+ 1. ***Run native script if exists***
+ * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-sqlite-dir/run.bat)
+ * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-sqlite-dir/run.sh)
+ 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-sqlite-dir/_cm.json)
+ 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-sqlite-dir/customize.py)***
+ 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-sqlite-dir/_cm.json) + +___ +### Script output +`cmr "set sqlite dir sqlite-dir " [--input_flags] -j` +#### New environment keys (filter) + +* `CM_SQLITE_PATH` +#### New environment keys auto-detected from customize + +* `CM_SQLITE_PATH` \ No newline at end of file diff --git a/docs/DevOps-automation/tar-my-folder.md b/docs/DevOps-automation/tar-my-folder.md new file mode 100644 index 0000000000..2b3c6bce59 --- /dev/null +++ b/docs/DevOps-automation/tar-my-folder.md @@ -0,0 +1,133 @@ +Automatically generated README for this automation recipe: **tar-my-folder** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=tar-my-folder,3784212e986c456b) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/tar-my-folder)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *run,tar* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "run tar" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=run,tar` + +`cm run script --tags=run,tar [--input_flags]` + +*or* + +`cmr "run tar"` + +`cmr "run tar " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'run,tar' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="run,tar"``` + +#### Run this script via Docker (beta) + +`cm docker script "run tar" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--input_dir=value` → `CM_TAR_INPUT_DIR=value` +* `--outfile=value` → `CM_TAR_OUTFILE=value` +* `--output_dir=value` → `CM_TAR_OUTPUT_DIR=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "input_dir":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+
+___
+### Dependencies on other CM scripts
+
+
+ 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/tar-my-folder/_cm.json)
+ 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/tar-my-folder/customize.py)***
+ 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/tar-my-folder/_cm.json)
+ 1. ***Run native script if exists***
+ 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/tar-my-folder/_cm.json)
+ 1. Run "postprocess" function from customize.py
+ 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/tar-my-folder/_cm.json)
+
+___
+### Script output
+`cmr "run tar " [--input_flags] -j`
+#### New environment keys (filter)
+
+#### New environment keys auto-detected from customize
diff --git a/docs/Docker-automation/build-docker-image.md b/docs/Docker-automation/build-docker-image.md
new file mode 100644
index 0000000000..e9eecd61f5
--- /dev/null
+++ b/docs/Docker-automation/build-docker-image.md
@@ -0,0 +1,160 @@
+Automatically generated README for this automation recipe: **build-docker-image**
+
+Category: **Docker automation**
+
+License: **Apache 2.0**
+
+Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md)
+
+---
+*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=build-docker-image,2c3c4ba2413442e7) ] [ [Notes from the authors, contributors and users](README-extra.md) ]*
+
+---
+#### Summary
+
+* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)*
+* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-docker-image)*
+* CM meta description for this script: 
*[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *build,docker,image,docker-image,dockerimage* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "build docker image docker-image dockerimage" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=build,docker,image,docker-image,dockerimage` + +`cm run script --tags=build,docker,image,docker-image,dockerimage [--input_flags]` + +*or* + +`cmr "build docker image docker-image dockerimage"` + +`cmr "build docker image docker-image dockerimage " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'build,docker,image,docker-image,dockerimage' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="build,docker,image,docker-image,dockerimage"``` + +#### Run this script via Docker (beta) + +`cm docker script "build docker image docker-image dockerimage" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--cache=value` → `CM_DOCKER_CACHE=value` +* `--cm_repo=value` → `CM_MLOPS_REPO=value` +* `--docker_os=value` → `CM_DOCKER_OS=value` +* `--docker_os_version=value` → `CM_DOCKER_OS_VERSION=value` +* `--dockerfile=value` → `CM_DOCKERFILE_WITH_PATH=value` +* `--gh_token=value` → `CM_GH_TOKEN=value` +* `--image_name=value` → `CM_DOCKER_IMAGE_NAME=value` +* `--image_repo=value` → `CM_DOCKER_IMAGE_REPO=value` +* `--image_tag=value` → `CM_DOCKER_IMAGE_TAG=value` +* `--post_run_cmds=value` → `CM_DOCKER_POST_RUN_COMMANDS=value` +* `--pre_run_cmds=value` → `CM_DOCKER_PRE_RUN_COMMANDS=value` +* `--push_image=value` → `CM_DOCKER_PUSH_IMAGE=value` +* `--real_run=value` → `CM_REAL_RUN=value` +* `--script_tags=value` → `CM_DOCKER_RUN_SCRIPT_TAGS=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "cache":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_DOCKER_IMAGE_REPO: `local` +* CM_DOCKER_IMAGE_TAG: `latest` + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-docker-image/_cm.yaml) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-docker-image/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-docker-image/_cm.yaml)*** + * build,dockerfile + * Enable this dependency only if all ENV vars are set:
+`{'CM_BUILD_DOCKERFILE': ['yes', '1']}`
+ - CM script: [build-dockerfile](https://github.com/mlcommons/cm4mlops/tree/master/script/build-dockerfile)
+ 1. ***Run native script if exists***
+ * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-docker-image/run.bat)
+ * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-docker-image/run.sh)
+ 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-docker-image/_cm.yaml)
+ 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-docker-image/customize.py)***
+ 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-docker-image/_cm.yaml)
+
+___
+### Script output
+`cmr "build docker image docker-image dockerimage " [--input_flags] -j`
+#### New environment keys (filter)
+
+* `CM_DOCKER_*`
+#### New environment keys auto-detected from customize
+
+* `CM_DOCKER_BUILD_ARGS`
+* `CM_DOCKER_BUILD_CMD`
+* `CM_DOCKER_CACHE_ARG`
+* `CM_DOCKER_IMAGE_NAME`
+* `CM_DOCKER_IMAGE_REPO`
+* `CM_DOCKER_IMAGE_TAG`
\ No newline at end of file
diff --git a/docs/Docker-automation/build-dockerfile.md b/docs/Docker-automation/build-dockerfile.md
new file mode 100644
index 0000000000..231cdc2f3a
--- /dev/null
+++ b/docs/Docker-automation/build-dockerfile.md
@@ -0,0 +1,186 @@
+Automatically generated README for this automation recipe: **build-dockerfile**
+
+Category: **Docker automation**
+
+License: **Apache 2.0**
+
+Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md)
+
+---
+*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=build-dockerfile,e66a7483230d4641) ] [ [Notes from the authors, contributors and users](README-extra.md) ]*
+
+---
+#### Summary
+
+* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)*
+* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-dockerfile)*
+* CM meta description for this script: *[_cm.yaml](_cm.yaml)*
+* All CM tags to find and reuse this script (see in above meta description): *build,dockerfile*
+* Output cached? *False*
+* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts
+
+
+---
+### Reuse this script in your project
+
+#### Install MLCommons CM automation meta-framework
+
+* [Install CM](https://access.cknowledge.org/playground/?action=install)
+* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md)
+
+#### Pull CM repository with this automation recipe (CM script)
+
+```cm pull repo mlcommons@cm4mlops```
+
+#### Print CM help from the command line
+
+````cmr "build dockerfile" --help````
+
+#### Customize and run this script from the command line with different variations and flags
+
+`cm run script --tags=build,dockerfile`
+
+`cm run script --tags=build,dockerfile[,variations] [--input_flags]`
+
+*or*
+
+`cmr "build dockerfile"`
+
+`cmr "build dockerfile [variations]" [--input_flags]`
+
+
+* *See the list of `variations` [here](#variations) and check the [Getting Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.*
+
+#### Run this script from Python
+
+<details>
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'build,dockerfile' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="build,dockerfile"``` + +#### Run this script via Docker (beta) + +`cm docker script "build dockerfile[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_slim` + - Environment variables: + - *CM_DOCKER_BUILD_SLIM*: `yes` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--build=value` → `CM_BUILD_DOCKER_IMAGE=value` +* `--cache=value` → `CM_DOCKER_CACHE=value` +* `--cm_repo=value` → `CM_MLOPS_REPO=value` +* `--cm_repo_flags=value` → `CM_DOCKER_ADD_FLAG_TO_CM_MLOPS_REPO=value` +* `--cm_repos=value` → `CM_DOCKER_EXTRA_CM_REPOS=value` +* `--comments=value` → `CM_DOCKER_RUN_COMMENTS=value` +* `--copy_files=value` → `CM_DOCKER_COPY_FILES=value` +* `--docker_base_image=value` → `CM_DOCKER_IMAGE_BASE=value` +* `--docker_os=value` → `CM_DOCKER_OS=value` +* `--docker_os_version=value` → `CM_DOCKER_OS_VERSION=value` +* `--extra_sys_deps=value` → `CM_DOCKER_EXTRA_SYS_DEPS=value` +* `--fake_docker_deps=value` → `CM_DOCKER_FAKE_DEPS=value` +* `--fake_run_option=value` → `CM_DOCKER_FAKE_RUN_OPTION=value` +* `--file_path=value` → `CM_DOCKERFILE_WITH_PATH=value` +* `--gh_token=value` → `CM_GH_TOKEN=value` +* `--image_repo=value` → `CM_DOCKER_IMAGE_REPO=value` +* `--image_tag=value` → `CM_DOCKER_IMAGE_TAG=value` +* `--package_manager_update_cmd=value` → `CM_PACKAGE_MANAGER_UPDATE_CMD=value` +* `--pip_extra_flags=value` → `CM_DOCKER_PIP_INSTALL_EXTRA_FLAGS=value` +* `--post_file=value` → `DOCKER_IMAGE_POST_FILE=value` +* `--post_run_cmds=value` → `CM_DOCKER_POST_RUN_COMMANDS=value` +* `--pre_run_cmds=value` → `CM_DOCKER_PRE_RUN_COMMANDS=value` +* `--push_image=value` → `CM_DOCKER_PUSH_IMAGE=value` +* `--real_run=value` → `CM_REAL_RUN=value` +* `--run_cmd=value` → `CM_DOCKER_RUN_CMD=value` +* `--run_cmd_extra=value` → `CM_DOCKER_RUN_CMD_EXTRA=value` +* `--script_tags=value` → `CM_DOCKER_RUN_SCRIPT_TAGS=value` +* `--skip_cm_sys_upgrade=value` → `CM_DOCKER_SKIP_CM_SYS_UPGRADE=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "build":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_DOCKER_BUILD_SLIM: `no` +* CM_DOCKER_IMAGE_EOL: ` +` +* CM_DOCKER_OS: `ubuntu` + +
+
+___
+### Dependencies on other CM scripts
+
+
+ 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-dockerfile/_cm.yaml)
+ 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-dockerfile/customize.py)***
+ 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-dockerfile/_cm.yaml)
+ 1. ***Run native script if exists***
+ 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-dockerfile/_cm.yaml)
+ 1. Run "postprocess" function from customize.py
+ 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-dockerfile/_cm.yaml)***
+ * build,docker,image
+ * Enable this dependency only if all ENV vars are set:
+`{'CM_BUILD_DOCKER_IMAGE': ['yes', '1']}` + * CM names: `--adr.['build-docker-image']...` + - CM script: [build-docker-image](https://github.com/mlcommons/cm4mlops/tree/master/script/build-docker-image) + +___ +### Script output +`cmr "build dockerfile [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_DOCKERFILE_*` +#### New environment keys auto-detected from customize + +* `CM_DOCKERFILE_WITH_PATH` \ No newline at end of file diff --git a/docs/Docker-automation/prune-docker.md b/docs/Docker-automation/prune-docker.md new file mode 100644 index 0000000000..513cc894b7 --- /dev/null +++ b/docs/Docker-automation/prune-docker.md @@ -0,0 +1,118 @@ +Automatically generated README for this automation recipe: **prune-docker** + +Category: **Docker automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=prune-docker,27ead88809bb4d4e) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-docker)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *prune,docker* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "prune docker" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=prune,docker` + +`cm run script --tags=prune,docker ` + +*or* + +`cmr "prune docker"` + +`cmr "prune docker " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'prune,docker' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="prune,docker"``` + +#### Run this script via Docker (beta) + +`cm docker script "prune docker" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+
+___
+### Dependencies on other CM scripts
+
+
+ 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-docker/_cm.json)
+ 1. Run "preprocess" function from customize.py
+ 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-docker/_cm.json)
+ 1. ***Run native script if exists***
+ * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-docker/run.bat)
+ * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-docker/run.sh)
+ 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-docker/_cm.json)
+ 1. Run "postprocess" function from customize.py
+ 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-docker/_cm.json)
+
+___
+### Script output
+`cmr "prune docker " -j`
+#### New environment keys (filter)
+
+#### New environment keys auto-detected from customize
diff --git a/docs/Docker-automation/run-docker-container.md b/docs/Docker-automation/run-docker-container.md
new file mode 100644
index 0000000000..312e03f9f1
--- /dev/null
+++ b/docs/Docker-automation/run-docker-container.md
@@ -0,0 +1,166 @@
+Automatically generated README for this automation recipe: **run-docker-container**
+
+Category: **Docker automation**
+
+License: **Apache 2.0**
+
+Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md)
+
+---
+*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=run-docker-container,1e0c884107514b46) ] [ [Notes from the authors, contributors and users](README-extra.md) ]*
+
+---
+#### Summary
+
+* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)*
+* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-docker-container)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *run,docker,container* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "run docker container" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=run,docker,container` + +`cm run script --tags=run,docker,container [--input_flags]` + +*or* + +`cmr "run docker container"` + +`cmr "run docker container " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'run,docker,container' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="run,docker,container"``` + +#### Run this script via Docker (beta) + +`cm docker script "run docker container" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--all_gpus=value` → `CM_DOCKER_ADD_ALL_GPUS=value` +* `--base=value` → `CM_DOCKER_IMAGE_BASE=value` +* `--cache=value` → `CM_DOCKER_CACHE=value` +* `--cm_repo=value` → `CM_MLOPS_REPO=value` +* `--detached=value` → `CM_DOCKER_DETACHED_MODE=value` +* `--device=value` → `CM_DOCKER_ADD_DEVICE=value` +* `--docker_image_base=value` → `CM_DOCKER_IMAGE_BASE=value` +* `--docker_os=value` → `CM_DOCKER_OS=value` +* `--docker_os_version=value` → `CM_DOCKER_OS_VERSION=value` +* `--extra_run_args=value` → `CM_DOCKER_EXTRA_RUN_ARGS=value` +* `--fake_run_option=value` → `CM_DOCKER_FAKE_RUN_OPTION=value` +* `--gh_token=value` → `CM_GH_TOKEN=value` +* `--image_name=value` → `CM_DOCKER_IMAGE_NAME=value` +* `--image_repo=value` → `CM_DOCKER_IMAGE_REPO=value` +* `--image_tag=value` → `CM_DOCKER_IMAGE_TAG=value` +* `--image_tag_extra=value` → `CM_DOCKER_IMAGE_TAG_EXTRA=value` +* `--interactive=value` → `CM_DOCKER_INTERACTIVE_MODE=value` +* `--it=value` → `CM_DOCKER_INTERACTIVE=value` +* `--mounts=value` → `CM_DOCKER_VOLUME_MOUNTS=value` +* `--pass_user_group=value` → `CM_DOCKER_PASS_USER_GROUP=value` +* `--port_maps=value` → `CM_DOCKER_PORT_MAPS=value` +* `--post_run_cmds=value` → `CM_DOCKER_POST_RUN_COMMANDS=value` +* `--pre_run_cmds=value` → `CM_DOCKER_PRE_RUN_COMMANDS=value` +* `--real_run=value` → `CM_REAL_RUN=value` +* `--recreate=value` → `CM_DOCKER_IMAGE_RECREATE=value` +* `--run_cmd=value` → `CM_DOCKER_RUN_CMD=value` +* `--run_cmd_extra=value` → `CM_DOCKER_RUN_CMD_EXTRA=value` +* `--save_script=value` → `CM_DOCKER_SAVE_SCRIPT=value` +* `--script_tags=value` → `CM_DOCKER_RUN_SCRIPT_TAGS=value` +* `--shm_size=value` → `CM_DOCKER_SHM_SIZE=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "all_gpus":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_DOCKER_DETACHED_MODE: `yes` + +
+
+___
+### Dependencies on other CM scripts
+
+
+ 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-docker-container/_cm.yaml)
+ 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-docker-container/customize.py)***
+ 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-docker-container/_cm.yaml)***
+ * build,docker,image
+ * Skip this dependency only if any of ENV vars are set:
+`{'CM_DOCKER_IMAGE_EXISTS': ['yes'], 'CM_DOCKER_SKIP_BUILD': ['yes']}`
+ * CM names: `--adr.['build-docker-image']...`
+ - CM script: [build-docker-image](https://github.com/mlcommons/cm4mlops/tree/master/script/build-docker-image)
+ 1. ***Run native script if exists***
+ 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-docker-container/_cm.yaml)
+ 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-docker-container/customize.py)***
+ 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-docker-container/_cm.yaml)
+
+___
+### Script output
+`cmr "run docker container " [--input_flags] -j`
+#### New environment keys (filter)
+
+#### New environment keys auto-detected from customize
diff --git a/docs/GUI/gui.md b/docs/GUI/gui.md
new file mode 100644
index 0000000000..c68754f51a
--- /dev/null
+++ b/docs/GUI/gui.md
@@ -0,0 +1,243 @@
+Automatically generated README for this automation recipe: **gui**
+
+Category: **GUI**
+
+License: **Apache 2.0**
+
+Developers: [Grigori Fursin](https://cKnowledge.org/gfursin)
+
+---
+*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=gui,605cac42514a4c69) ]*
+
+---
+
+This CM script provides a unified GUI to run CM scripts using [Streamlit library](https://streamlit.io).
+
+If you want to run it in a cloud (Azure, AWS, GCP), you need to open some port and test that you can reach it from outside. 
+ +By default, streamlit uses port 8501 but you can change it as follows: + +```bash +cm run script "cm gui" --port 80 +``` + +If you have troubles accessing this port, use this simple python module to test if your port is open: +```bash +python3 -m http.server 80 +``` + + + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/gui)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *cm,gui,cm-gui,script-gui,cm-script-gui,streamlit* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "cm gui cm-gui script-gui cm-script-gui streamlit" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=cm,gui,cm-gui,script-gui,cm-script-gui,streamlit` + +`cm run script --tags=cm,gui,cm-gui,script-gui,cm-script-gui,streamlit[,variations] [--input_flags]` + +*or* + +`cmr "cm gui cm-gui script-gui cm-script-gui streamlit"` + +`cmr "cm gui cm-gui script-gui cm-script-gui streamlit [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + + +#### Input Flags + +* --**script**=script tags +* --**app**=gui app + 
+**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "script":...}) +``` +#### Run this script from Python + +<details>
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'cm,gui,cm-gui,script-gui,cm-script-gui,streamlit', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="cm,gui,cm-gui,script-gui,cm-script-gui,streamlit"``` + +#### Run this script via Docker (beta) + +`cm docker script "cm gui cm-gui script-gui cm-script-gui streamlit[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * Group "**app**" +
+ Click here to expand this section. + + * `_chatgpt` + - Environment variables: + - *CM_GUI_APP*: `chatgpt` + - Workflow: + * `_graph` + - Environment variables: + - *CM_GUI_APP*: `graph` + - Workflow: + 1. ***Read "prehook_deps" on other CM scripts*** + * get,generic-python-lib,_matplotlib + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_mpld3 + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_main` + - Environment variables: + - *CM_GUI_APP*: `app` + - Workflow: + * `_playground` + - Environment variables: + - *CM_GUI_APP*: `playground` + - Workflow: + 1. ***Read "prehook_deps" on other CM scripts*** + * get,generic-python-lib,_matplotlib + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_mpld3 + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_streamlit_option_menu + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_numpy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_pandas + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.plotly + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.streamlit-aggrid + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--address=value` → `CM_GUI_ADDRESS=value` +* `--app=value` → `CM_GUI_APP=value` +* `--exp_key_c=value` → `CM_GUI_GRAPH_EXPERIMENT_AXIS_KEY_C=value` +* `--exp_key_s=value` → `CM_GUI_GRAPH_EXPERIMENT_AXIS_KEY_S=value` +* `--exp_key_x=value` → `CM_GUI_GRAPH_EXPERIMENT_AXIS_KEY_X=value` +* `--exp_key_y=value` → `CM_GUI_GRAPH_EXPERIMENT_AXIS_KEY_Y=value` +* `--exp_max_results=value` → `CM_GUI_GRAPH_EXPERIMENT_MAX_RESULTS=value` +* `--exp_name=value` → `CM_GUI_GRAPH_EXPERIMENT_NAME=value` +* `--exp_tags=value` → `CM_GUI_GRAPH_EXPERIMENT_TAGS=value` +* `--exp_title=value` → `CM_GUI_GRAPH_EXPERIMENT_TITLE=value` +* `--exp_uid=value` → `CM_GUI_GRAPH_EXPERIMENT_RESULT_UID=value` +* `--no_browser=value` → `CM_GUI_NO_BROWSER=value` +* `--no_run=value` → `CM_GUI_NO_RUN=value` +* `--port=value` → `CM_GUI_PORT=value` +* `--prefix=value` → `CM_GUI_SCRIPT_PREFIX_LINUX=value` +* `--script=value` → `CM_GUI_SCRIPT_TAGS=value` +* `--title=value` → `CM_GUI_TITLE=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "address":...}) +``` + +</details>
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_GUI_EXTRA_CMD: `` +* CM_GUI_SCRIPT_PREFIX_LINUX: `gnome-terminal --` +* CM_GUI_APP: `app` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/gui/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_cmind + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_streamlit + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/gui/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/gui/_cm.yaml) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/gui/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/gui/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/gui/_cm.yaml) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/gui/_cm.yaml) + +___ +### Script output +`cmr "cm gui cm-gui script-gui cm-script-gui streamlit [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Legacy-CK-support/get-ck-repo-mlops.md b/docs/Legacy-CK-support/get-ck-repo-mlops.md new file mode 100644 index 0000000000..6977a1460c --- /dev/null +++ b/docs/Legacy-CK-support/get-ck-repo-mlops.md @@ -0,0 +1,120 @@ +Automatically generated README for this automation recipe: **get-ck-repo-mlops** + +Category: **Legacy CK support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ck-repo-mlops,d3a619b8186e4f74) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck-repo-mlops)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,ck-repo,mlops,ck-repo-mlops* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get ck-repo mlops ck-repo-mlops" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,ck-repo,mlops,ck-repo-mlops` + +`cm run script --tags=get,ck-repo,mlops,ck-repo-mlops ` + +*or* + +`cmr "get ck-repo mlops ck-repo-mlops"` + +`cmr "get ck-repo mlops ck-repo-mlops " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,ck-repo,mlops,ck-repo-mlops', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,ck-repo,mlops,ck-repo-mlops"``` + +#### Run this script via Docker (beta) + +`cm docker script "get ck-repo mlops ck-repo-mlops" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck-repo-mlops/_cm.json)*** + * get,ck + - CM script: [get-ck](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ck) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck-repo-mlops/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck-repo-mlops/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck-repo-mlops/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck-repo-mlops/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck-repo-mlops/_cm.json) + +___ +### Script output +`cmr "get ck-repo mlops ck-repo-mlops " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Legacy-CK-support/get-ck.md b/docs/Legacy-CK-support/get-ck.md new file mode 100644 index 0000000000..29a213ab18 --- /dev/null +++ b/docs/Legacy-CK-support/get-ck.md @@ -0,0 +1,118 @@ +Automatically generated README for this automation recipe: **get-ck** + +Category: **Legacy CK support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ck,5575126797174cac) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,ck,ck-framework* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get ck ck-framework" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,ck,ck-framework` + +`cm run script --tags=get,ck,ck-framework ` + +*or* + +`cmr "get ck ck-framework"` + +`cmr "get ck ck-framework " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,ck,ck-framework', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,ck,ck-framework"``` + +#### Run this script via Docker (beta) + +`cm docker script "get ck ck-framework" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck/_cm.json) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck/_cm.json) + +___ +### Script output +`cmr "get ck ck-framework " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/add-custom-nvidia-system.md b/docs/MLPerf-benchmark-support/add-custom-nvidia-system.md new file mode 100644 index 0000000000..64bf4f9e65 --- /dev/null +++ b/docs/MLPerf-benchmark-support/add-custom-nvidia-system.md @@ -0,0 +1,175 @@ +Automatically generated README for this automation recipe: **add-custom-nvidia-system** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=add-custom-nvidia-system,b2e6c46c6e8745a3) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/add-custom-nvidia-system)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *add,custom,system,nvidia* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "add custom system nvidia" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=add,custom,system,nvidia` + +`cm run script --tags=add,custom,system,nvidia[,variations] ` + +*or* + +`cmr "add custom system nvidia"` + +`cmr "add custom system nvidia [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'add,custom,system,nvidia', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="add,custom,system,nvidia"``` + +#### Run this script via Docker (beta) + +`cm docker script "add custom system nvidia[variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**code**" +
+ Click here to expand this section. + + * `_ctuning` + - Workflow: + * `_custom` + - Workflow: + * `_mlcommons` + - Workflow: + * `_nvidia-only` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +* `r2.1` +* `r3.0` +* `r3.1` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/add-custom-nvidia-system/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,cuda,_cudnn + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * get,tensorrt + - CM script: [get-tensorrt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tensorrt) + * get,cmake + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + * get,generic-python-lib,_requests + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic,sys-util,_glog-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_gflags-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_libre2-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_libnuma-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_libboost-all-dev + - CM script: 
[get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_rapidjson-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,nvidia,mlperf,inference,common-code + * CM names: `--adr.['nvidia-inference-common-code']...` + - CM script: [get-mlperf-inference-nvidia-common-code](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-nvidia-common-code) + * get,generic-python-lib,_pycuda + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/add-custom-nvidia-system/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/add-custom-nvidia-system/_cm.yaml) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/add-custom-nvidia-system/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/add-custom-nvidia-system/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/add-custom-nvidia-system/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/add-custom-nvidia-system/_cm.yaml) + +___ +### Script output +`cmr "add custom system nvidia [,variations]" -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/benchmark-any-mlperf-inference-implementation.md b/docs/MLPerf-benchmark-support/benchmark-any-mlperf-inference-implementation.md new file mode 100644 index 0000000000..d84308de46 --- /dev/null +++ b/docs/MLPerf-benchmark-support/benchmark-any-mlperf-inference-implementation.md @@ -0,0 +1,268 @@ +Automatically generated README for this automation recipe: **benchmark-any-mlperf-inference-implementation** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=benchmark-any-mlperf-inference-implementation,8d3cd46f54464810) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-any-mlperf-inference-implementation)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *benchmark,run,natively,all,inference,any,mlperf,mlperf-implementation,implementation,mlperf-models* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "benchmark run natively all inference any mlperf mlperf-implementation implementation mlperf-models" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=benchmark,run,natively,all,inference,any,mlperf,mlperf-implementation,implementation,mlperf-models` + +`cm run script --tags=benchmark,run,natively,all,inference,any,mlperf,mlperf-implementation,implementation,mlperf-models[,variations] [--input_flags]` + +*or* + +`cmr "benchmark run natively all inference any mlperf mlperf-implementation implementation mlperf-models"` + +`cmr "benchmark run natively all inference any mlperf mlperf-implementation implementation mlperf-models [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'benchmark,run,natively,all,inference,any,mlperf,mlperf-implementation,implementation,mlperf-models', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="benchmark,run,natively,all,inference,any,mlperf,mlperf-implementation,implementation,mlperf-models"``` + +#### Run this script via Docker (beta) + +`cm docker script "benchmark run natively all inference any mlperf mlperf-implementation implementation mlperf-models[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_aws-dl2q.24xlarge,qualcomm` + - Workflow: + * `_mini,power` + - Workflow: + * `_orin,power` + - Workflow: + * `_phoenix,nvidia` + - Workflow: + * `_phoenix,power` + - Workflow: + * `_phoenix,reference` + - Workflow: + * `_rb6,power` + - Workflow: + * `_rb6,qualcomm` + - Workflow: + * `_rpi4,power` + - Workflow: + * `_sapphire-rapids.24c,nvidia` + - Workflow: + +
+ + + * Group "**implementation**" +
+ Click here to expand this section. + + * `_deepsparse` + - Environment variables: + - *DIVISION*: `open` + - *IMPLEMENTATION*: `deepsparse` + - Workflow: + * `_intel` + - Environment variables: + - *IMPLEMENTATION*: `intel` + - Workflow: + * `_mil` + - Environment variables: + - *IMPLEMENTATION*: `mil` + - Workflow: + * `_nvidia` + - Environment variables: + - *IMPLEMENTATION*: `nvidia-original` + - Workflow: + * `_qualcomm` + - Environment variables: + - *IMPLEMENTATION*: `qualcomm` + - Workflow: + * `_reference` + - Environment variables: + - *IMPLEMENTATION*: `reference` + - Workflow: + * `_tflite-cpp` + - Environment variables: + - *IMPLEMENTATION*: `tflite_cpp` + - Workflow: + +
+ + + * Group "**power**" +
+ Click here to expand this section. + + * **`_performance-only`** (default) + - Workflow: + * `_power` + - Environment variables: + - *POWER*: `True` + - Workflow: + +
+ + + * Group "**sut**" +
+ Click here to expand this section. + + * `_aws-dl2q.24xlarge` + - Workflow: + * `_macbookpro-m1` + - Environment variables: + - *CATEGORY*: `edge` + - *DIVISION*: `closed` + - Workflow: + * `_mini` + - Workflow: + * `_orin` + - Workflow: + * `_orin.32g` + - Environment variables: + - *CATEGORY*: `edge` + - *DIVISION*: `closed` + - Workflow: + * `_phoenix` + - Environment variables: + - *CATEGORY*: `edge` + - *DIVISION*: `closed` + - Workflow: + * `_rb6` + - Workflow: + * `_rpi4` + - Workflow: + * `_sapphire-rapids.24c` + - Environment variables: + - *CATEGORY*: `edge` + - *DIVISION*: `closed` + - Workflow: + +
+ + +#### Default variations + +`_performance-only` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--backends=value` → `BACKENDS=value` +* `--category=value` → `CATEGORY=value` +* `--devices=value` → `DEVICES=value` +* `--division=value` → `DIVISION=value` +* `--extra_args=value` → `EXTRA_ARGS=value` +* `--models=value` → `MODELS=value` +* `--power_server=value` → `POWER_SERVER=value` +* `--power_server_port=value` → `POWER_SERVER_PORT=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "backends":...}) +``` + +</details>
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* DIVISION: `open` +* CATEGORY: `edge` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-any-mlperf-inference-implementation/_cm.yaml)*** + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-any-mlperf-inference-implementation/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-any-mlperf-inference-implementation/_cm.yaml) + 1. ***Run native script if exists*** + * [run-template.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-any-mlperf-inference-implementation/run-template.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-any-mlperf-inference-implementation/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-any-mlperf-inference-implementation/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-any-mlperf-inference-implementation/_cm.yaml) + +___ +### Script output +`cmr "benchmark run natively all inference any mlperf mlperf-implementation implementation mlperf-models [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/build-mlperf-inference-server-nvidia.md b/docs/MLPerf-benchmark-support/build-mlperf-inference-server-nvidia.md new file mode 100644 index 0000000000..d97de80540 --- /dev/null +++ b/docs/MLPerf-benchmark-support/build-mlperf-inference-server-nvidia.md @@ -0,0 +1,248 @@ +Automatically generated README for this automation recipe: **build-mlperf-inference-server-nvidia** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=build-mlperf-inference-server-nvidia,f37403af5e9f4541) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-mlperf-inference-server-nvidia)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *build,mlcommons,mlperf,inference,inference-server,server,nvidia-harness,nvidia* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "build mlcommons mlperf inference inference-server server nvidia-harness nvidia" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=build,mlcommons,mlperf,inference,inference-server,server,nvidia-harness,nvidia` + +`cm run script --tags=build,mlcommons,mlperf,inference,inference-server,server,nvidia-harness,nvidia[,variations] [--input_flags]` + +*or* + +`cmr "build mlcommons mlperf inference inference-server server nvidia-harness nvidia"` + +`cmr "build mlcommons mlperf inference inference-server server nvidia-harness nvidia [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'build,mlcommons,mlperf,inference,inference-server,server,nvidia-harness,nvidia', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="build,mlcommons,mlperf,inference,inference-server,server,nvidia-harness,nvidia"``` + +#### Run this script via Docker (beta) + +`cm docker script "build mlcommons mlperf inference inference-server server nvidia-harness nvidia[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * Group "**code**" +
+ Click here to expand this section. + + * **`_ctuning`** (default) + - Workflow: + * `_custom` + - Workflow: + * `_mlcommons` + - Workflow: + * `_nvidia-only` + - Workflow: + +
+ + + * Group "**device**" +
+ Click here to expand this section. + + * `_cpu` + - Environment variables: + - *CM_MLPERF_DEVICE*: `cpu` + - Workflow: + * **`_cuda`** (default) + - Environment variables: + - *CM_MLPERF_DEVICE*: `cuda` + - *CM_MLPERF_DEVICE_LIB_NAMESPEC*: `cudart` + - Workflow: + * `_inferentia` + - Environment variables: + - *CM_MLPERF_DEVICE*: `inferentia` + - Workflow: + +
+ + +#### Default variations + +`_ctuning,_cuda` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--clean=value` → `CM_MAKE_CLEAN=value` +* `--custom_system=value` → `CM_CUSTOM_SYSTEM_NVIDIA=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "clean":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_MAKE_BUILD_COMMAND: `build` +* CM_MAKE_CLEAN: `no` +* CM_CUSTOM_SYSTEM_NVIDIA: `yes` + +
+ +#### Versions +Default version: `r3.1` + +* `r2.1` +* `r3.0` +* `r3.1` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-mlperf-inference-server-nvidia/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,cuda,_cudnn + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_DEVICE': ['cuda', 'inferentia']}` + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * get,tensorrt,_dev + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_DEVICE': ['cuda', 'inferentia']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_TENSORRT_SYSTEM_DETECT': [True]}` + * CM names: `--adr.['tensorrt']...` + - CM script: [get-tensorrt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tensorrt) + * get,gcc + - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) + * get,cmake + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + * get,generic,sys-util,_glog-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_gflags-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_libgmock-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_libre2-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_libnuma-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_libboost-all-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_rapidjson-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,nvidia,mlperf,inference,common-code + * CM names: `--adr.['nvidia-inference-common-code']...` + - CM script: [get-mlperf-inference-nvidia-common-code](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-nvidia-common-code) + * get,generic-python-lib,_package.pybind11 + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_pycuda + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_RUN_STATE_DOCKER': ['yes', True, 'True']}` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_opencv-python + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_nvidia-dali + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,mlperf,inference,nvidia,scratch,space + * CM names: `--adr.['nvidia-scratch-space']...` + - CM script: [get-mlperf-inference-nvidia-scratch-space](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-nvidia-scratch-space) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-mlperf-inference-server-nvidia/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-mlperf-inference-server-nvidia/_cm.yaml) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-mlperf-inference-server-nvidia/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-mlperf-inference-server-nvidia/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-mlperf-inference-server-nvidia/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-mlperf-inference-server-nvidia/_cm.yaml)*** + * add,custom,system,nvidia + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_CUSTOM_SYSTEM_NVIDIA': ['no', False, 'False']}` + * CM names: `--adr.['custom-system-nvidia', 'nvidia-inference-common-code']...` + - CM script: [add-custom-nvidia-system](https://github.com/mlcommons/cm4mlops/tree/master/script/add-custom-nvidia-system) + +___ +### Script output +`cmr "build mlcommons mlperf inference inference-server server nvidia-harness nvidia [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_MLPERF_INFERENCE_NVIDIA_CODE_PATH` +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/generate-mlperf-inference-submission.md b/docs/MLPerf-benchmark-support/generate-mlperf-inference-submission.md new file mode 100644 index 0000000000..e3a085fab9 --- /dev/null +++ b/docs/MLPerf-benchmark-support/generate-mlperf-inference-submission.md @@ -0,0 +1,191 @@ +Automatically generated README for this automation recipe: **generate-mlperf-inference-submission** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=generate-mlperf-inference-submission,5f8ab2d0b5874d53) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-submission)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *generate,submission,mlperf,mlperf-inference,inference,mlcommons,inference-submission,mlperf-inference-submission,mlcommons-inference-submission* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "generate submission mlperf mlperf-inference inference mlcommons inference-submission mlperf-inference-submission mlcommons-inference-submission" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=generate,submission,mlperf,mlperf-inference,inference,mlcommons,inference-submission,mlperf-inference-submission,mlcommons-inference-submission` + +`cm run script --tags=generate,submission,mlperf,mlperf-inference,inference,mlcommons,inference-submission,mlperf-inference-submission,mlcommons-inference-submission [--input_flags]` + +*or* + +`cmr "generate submission mlperf mlperf-inference inference mlcommons inference-submission mlperf-inference-submission mlcommons-inference-submission"` + +`cmr "generate submission mlperf mlperf-inference inference mlcommons inference-submission mlperf-inference-submission mlcommons-inference-submission " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'generate,submission,mlperf,mlperf-inference,inference,mlcommons,inference-submission,mlperf-inference-submission,mlcommons-inference-submission', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="generate,submission,mlperf,mlperf-inference,inference,mlcommons,inference-submission,mlperf-inference-submission,mlcommons-inference-submission"``` + +#### Run this script via Docker (beta) + +`cm docker script "generate submission mlperf mlperf-inference inference mlcommons inference-submission mlperf-inference-submission mlcommons-inference-submission" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--analyzer_settings_file=value` → `CM_MLPERF_POWER_ANALYZER_SETTINGS_FILE_PATH=value` +* `--category=value` → `CM_MLPERF_SUBMISSION_CATEGORY=value` +* `--clean=value` → `CM_MLPERF_CLEAN_SUBMISSION_DIR=value` +* `--dashboard=value` → `CM_MLPERF_DASHBOARD=value` +* `--dashboard_wb_project=value` → `CM_MLPERF_DASHBOARD_WANDB_PROJECT=value` +* `--device=value` → `CM_MLPERF_DEVICE=value` +* `--division=value` → `CM_MLPERF_SUBMISSION_DIVISION=value` +* `--duplicate=value` → `CM_MLPERF_DUPLICATE_SCENARIO_RESULTS=value` +* `--hw_name=value` → `CM_HW_NAME=value` +* `--hw_notes_extra=value` → `CM_MLPERF_SUT_HW_NOTES_EXTRA=value` +* `--infer_scenario_results=value` → `CM_MLPERF_DUPLICATE_SCENARIO_RESULTS=value` +* `--power_settings_file=value` → `CM_MLPERF_POWER_SETTINGS_FILE_PATH=value` +* `--preprocess=value` → `CM_RUN_MLPERF_SUBMISSION_PREPROCESSOR=value` +* `--preprocess_submission=value` → `CM_RUN_MLPERF_SUBMISSION_PREPROCESSOR=value` +* `--results_dir=value` → `CM_MLPERF_INFERENCE_RESULTS_DIR_=value` +* `--run_checker=value` → `CM_RUN_SUBMISSION_CHECKER=value` +* `--run_style=value` → `CM_MLPERF_RUN_STYLE=value` +* `--skip_truncation=value` → `CM_SKIP_TRUNCATE_ACCURACY=value` +* `--submission_dir=value` → `CM_MLPERF_INFERENCE_SUBMISSION_DIR=value` +* `--submitter=value` → `CM_MLPERF_SUBMITTER=value` +* `--sw_notes_extra=value` → `CM_MLPERF_SUT_SW_NOTES_EXTRA=value` +* `--tar=value` → `CM_TAR_SUBMISSION_DIR=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "analyzer_settings_file":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_RUN_MLPERF_ACCURACY: `on` +* CM_MLPERF_RUN_STYLE: `valid` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-submission/_cm.json)*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * mlcommons,inference,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,sut,system-description + - CM script: [get-mlperf-inference-sut-description](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-sut-description) + * install,pip-package,for-cmind-python,_package.tabulate + - CM script: [install-pip-package-for-cmind-python](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pip-package-for-cmind-python) + * get,mlperf,inference,utils + - CM script: [get-mlperf-inference-utils](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-utils) + * get,mlperf,results,dir + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_MLPERF_INFERENCE_RESULTS_DIR_': ['on']}` + * CM names: `--adr.['get-mlperf-results-dir']...` + - CM script: [get-mlperf-inference-results-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-results-dir) + * get,mlperf,submission,dir + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_INFERENCE_SUBMISSION_DIR': ['on']}` + * CM names: `--adr.['get-mlperf-submission-dir']...` + - CM script: [get-mlperf-inference-submission-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-submission-dir) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-submission/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-submission/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-submission/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-submission/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-submission/_cm.json)*** + * accuracy,truncate,mlc + * Enable this dependency only if all ENV vars are set:
+`{'CM_RUN_MLPERF_ACCURACY': ['on']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_SKIP_TRUNCATE_ACCURACY': ['yes']}` + - CM script: [truncate-mlperf-inference-accuracy-log](https://github.com/mlcommons/cm4mlops/tree/master/script/truncate-mlperf-inference-accuracy-log) + * preprocess,mlperf,submission + * Enable this dependency only if all ENV vars are set:
+`{'CM_RUN_MLPERF_SUBMISSION_PREPROCESSOR': ['on', 'True', 'yes', True]}` + - CM script: [preprocess-mlperf-inference-submission](https://github.com/mlcommons/cm4mlops/tree/master/script/preprocess-mlperf-inference-submission) + * submission,inference,checker,mlc + * Enable this dependency only if all ENV vars are set:
+`{'CM_RUN_SUBMISSION_CHECKER': ['yes']}` + * CM names: `--adr.['mlperf-inference-submission-checker', 'submission-checker']...` + - CM script: [run-mlperf-inference-submission-checker](https://github.com/mlcommons/cm4mlops/tree/master/script/run-mlperf-inference-submission-checker) + +___ +### Script output +`cmr "generate submission mlperf mlperf-inference inference mlcommons inference-submission mlperf-inference-submission mlcommons-inference-submission " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/generate-mlperf-inference-user-conf.md b/docs/MLPerf-benchmark-support/generate-mlperf-inference-user-conf.md new file mode 100644 index 0000000000..f248f0b50e --- /dev/null +++ b/docs/MLPerf-benchmark-support/generate-mlperf-inference-user-conf.md @@ -0,0 +1,199 @@ +Automatically generated README for this automation recipe: **generate-mlperf-inference-user-conf** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Developers: [Arjun Suresh](https://www.linkedin.com/in/arjunsuresh), [Thomas Zhu](https://www.linkedin.com/in/hanwen-zhu-483614189), [Grigori Fursin](https://cKnowledge.org/gfursin) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=generate-mlperf-inference-user-conf,3af4475745964b93) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-user-conf)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *generate,mlperf,inference,user-conf,inference-user-conf* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "generate mlperf inference user-conf inference-user-conf" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=generate,mlperf,inference,user-conf,inference-user-conf` + +`cm run script --tags=generate,mlperf,inference,user-conf,inference-user-conf [--input_flags]` + +*or* + +`cmr "generate mlperf inference user-conf inference-user-conf"` + +`cmr "generate mlperf inference user-conf inference-user-conf " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'generate,mlperf,inference,user-conf,inference-user-conf', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="generate,mlperf,inference,user-conf,inference-user-conf"``` + +#### Run this script via Docker (beta) + +`cm docker script "generate mlperf inference user-conf inference-user-conf" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--count=value` → `CM_MLPERF_LOADGEN_QUERY_COUNT=value` +* `--hw_name=value` → `CM_HW_NAME=value` +* `--mode=value` → `CM_MLPERF_LOADGEN_MODE=value` +* `--multistream_target_latency=value` → `CM_MLPERF_LOADGEN_MULTISTREAM_TARGET_LATENCY=value` +* `--num_threads=value` → `CM_NUM_THREADS=value` +* `--offline_target_qps=value` → `CM_MLPERF_LOADGEN_OFFLINE_TARGET_QPS=value` +* `--output_dir=value` → `OUTPUT_BASE_DIR=value` +* `--performance_sample_count=value` → `CM_MLPERF_PERFORMANCE_SAMPLE_COUNT=value` +* `--power=value` → `CM_MLPERF_POWER=value` +* `--regenerate_files=value` → `CM_REGENERATE_MEASURE_FILES=value` +* `--rerun=value` → `CM_RERUN=value` +* `--scenario=value` → `CM_MLPERF_LOADGEN_SCENARIO=value` +* `--server_target_qps=value` → `CM_MLPERF_LOADGEN_SERVER_TARGET_QPS=value` +* `--singlestream_target_latency=value` → `CM_MLPERF_LOADGEN_SINGLESTREAM_TARGET_LATENCY=value` +* `--target_latency=value` → `CM_MLPERF_LOADGEN_TARGET_LATENCY=value` +* `--target_qps=value` → `CM_MLPERF_LOADGEN_TARGET_QPS=value` +* `--test_query_count=value` → `CM_TEST_QUERY_COUNT=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "count":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_MLPERF_LOADGEN_MODE: `accuracy` +* CM_MLPERF_LOADGEN_SCENARIO: `Offline` +* CM_OUTPUT_FOLDER_NAME: `test_results` +* CM_MLPERF_RUN_STYLE: `test` +* CM_TEST_QUERY_COUNT: `10` +* CM_FAST_FACTOR: `5` +* CM_MLPERF_QUANTIZATION: `False` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-user-conf/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,python + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,mlperf,results,dir + * Skip this dependenecy only if all ENV vars are set:
+`{'OUTPUT_BASE_DIR': ['on']}` + * CM names: `--adr.['get-mlperf-results-dir']...` + - CM script: [get-mlperf-inference-results-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-results-dir) + * get,mlcommons,inference,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,sut,configs + - CM script: [get-mlperf-inference-sut-configs](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-sut-configs) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-user-conf/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-user-conf/_cm.yaml) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-user-conf/_cm.yaml) + 1. Run "postprocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-user-conf/_cm.yaml) + +___ +### Script output +`cmr "generate mlperf inference user-conf inference-user-conf " [--input_flags] -j` +#### New environment keys (filter) + +* `CM_HW_*` +* `CM_LOGS_DIR` +* `CM_MAX_EXAMPLES` +* `CM_MLPERF_*` +* `CM_SUT_*` +#### New environment keys auto-detected from customize + +* `CM_LOGS_DIR` +* `CM_MAX_EXAMPLES` +* `CM_MLPERF_ACCURACY_RESULTS_DIR` +* `CM_MLPERF_COMPLIANCE_RUN_POSTPONED` +* `CM_MLPERF_CONF` +* `CM_MLPERF_INFERENCE_AUDIT_PATH` +* `CM_MLPERF_INFERENCE_FINAL_RESULTS_DIR` +* `CM_MLPERF_INFERENCE_MIN_DURATION` +* `CM_MLPERF_LOADGEN_LOGS_DIR` +* `CM_MLPERF_LOADGEN_MODE` +* `CM_MLPERF_LOADGEN_QUERY_COUNT` +* `CM_MLPERF_LOADGEN_SCENARIO` +* `CM_MLPERF_LOADGEN_TARGET_LATENCY` +* `CM_MLPERF_LOADGEN_TARGET_QPS` +* `CM_MLPERF_OUTPUT_DIR` +* `CM_MLPERF_POWER_LOG_DIR` +* `CM_MLPERF_RANGING_USER_CONF` +* `CM_MLPERF_RUN_STYLE` +* `CM_MLPERF_SKIP_RUN` +* `CM_MLPERF_TESTING_USER_CONF` +* `CM_MLPERF_USER_CONF` +* `CM_MLPERF_USE_MAX_DURATION` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/generate-mlperf-tiny-report.md b/docs/MLPerf-benchmark-support/generate-mlperf-tiny-report.md new file mode 100644 index 0000000000..c2df0a411c --- /dev/null +++ b/docs/MLPerf-benchmark-support/generate-mlperf-tiny-report.md @@ -0,0 +1,145 @@ +Automatically generated README for this automation recipe: **generate-mlperf-tiny-report** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Developers: [Grigori Fursin](https://cKnowledge.org/gfursin) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=generate-mlperf-tiny-report,709c3f3f9b3e4783) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-report)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *generate,mlperf,tiny,mlperf-tiny,report* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "generate mlperf tiny mlperf-tiny report" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=generate,mlperf,tiny,mlperf-tiny,report` + +`cm run script --tags=generate,mlperf,tiny,mlperf-tiny,report [--input_flags]` + +*or* + +`cmr "generate mlperf tiny mlperf-tiny report"` + +`cmr "generate mlperf tiny mlperf-tiny report " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'generate,mlperf,tiny,mlperf-tiny,report', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="generate,mlperf,tiny,mlperf-tiny,report"``` + +#### Run this script via Docker (beta) + +`cm docker script "generate mlperf tiny mlperf-tiny report" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--repo_tags=value` → `CM_IMPORT_TINYMLPERF_REPO_TAGS=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "repo_tags":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_IMPORT_TINYMLPERF_REPO_TAGS: `1.1-private` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-report/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_xlsxwriter + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_pandas + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-report/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-report/_cm.yaml) + 1. ***Run native script if exists*** + * [run_submission_checker.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-report/run_submission_checker.bat) + * [run_submission_checker.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-report/run_submission_checker.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-report/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-report/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-report/_cm.yaml) + +___ +### Script output +`cmr "generate mlperf tiny mlperf-tiny report " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/generate-mlperf-tiny-submission.md b/docs/MLPerf-benchmark-support/generate-mlperf-tiny-submission.md new file mode 100644 index 0000000000..8e6de94270 --- /dev/null +++ b/docs/MLPerf-benchmark-support/generate-mlperf-tiny-submission.md @@ -0,0 +1,414 @@ +Automatically generated README for this automation recipe: **generate-mlperf-tiny-submission** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=generate-mlperf-tiny-submission,04289b9fc07b42b6) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-submission)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *generate,submission,mlperf,mlperf-tiny,tiny,mlcommons,tiny-submission,mlperf-tiny-submission,mlcommons-tiny-submission* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "generate submission mlperf mlperf-tiny tiny mlcommons tiny-submission mlperf-tiny-submission mlcommons-tiny-submission" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=generate,submission,mlperf,mlperf-tiny,tiny,mlcommons,tiny-submission,mlperf-tiny-submission,mlcommons-tiny-submission` + +`cm run script --tags=generate,submission,mlperf,mlperf-tiny,tiny,mlcommons,tiny-submission,mlperf-tiny-submission,mlcommons-tiny-submission ` + +*or* + +`cmr "generate submission mlperf mlperf-tiny tiny mlcommons tiny-submission mlperf-tiny-submission mlcommons-tiny-submission"` + +`cmr "generate submission mlperf mlperf-tiny tiny mlcommons tiny-submission mlperf-tiny-submission mlcommons-tiny-submission " ` + + +#### Run this script from Python + +
+
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'generate,submission,mlperf,mlperf-tiny,tiny,mlcommons,tiny-submission,mlperf-tiny-submission,mlcommons-tiny-submission',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="generate,submission,mlperf,mlperf-tiny,tiny,mlcommons,tiny-submission,mlperf-tiny-submission,mlcommons-tiny-submission"``` + +#### Run this script via Docker (beta) + +`cm docker script "generate submission mlperf mlperf-tiny tiny mlcommons tiny-submission mlperf-tiny-submission mlcommons-tiny-submission" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+
+___
+### Dependencies on other CM scripts
+
+
+  1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-submission/_cm.json)***
+     * get,python3
+       * CM names: `--adr.['python', 'python3']...`
+       - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3)
+     * get,sut,system-description
+       - CM script: [get-mlperf-inference-sut-description](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-sut-description)
+  1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-submission/customize.py)***
+  1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-submission/_cm.json)
+  1. ***Run native script if exists***
+  1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-submission/_cm.json)
+  1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-submission/customize.py)***
+  1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-submission/_cm.json)***
+       *
+         * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_RUN_STYLE': ['valid']}` + - CM script: [outdated-test-abtf-ssd-pytorch](outdated-test-abtf-ssd-pytorch) + - CM script: [run-how-to-run-server](https://github.com/how-to-run/server/tree/master/script/run-how-to-run-server) + - CM script: [get-dataset-cognata-mlcommons](https://github.com/mlcommons/cm4abtf/tree/master/script/get-dataset-cognata-mlcommons) + - CM script: [get-ml-model-abtf-ssd-pytorch](https://github.com/mlcommons/cm4abtf/tree/master/script/get-ml-model-abtf-ssd-pytorch) + - CM script: [test-ssd-resnet50-cognata-pytorch](https://github.com/mlcommons/cm4abtf/tree/master/script/test-ssd-resnet50-cognata-pytorch) + - CM script: [reproduce-ieee-acm-micro2023-paper-22](https://github.com/ctuning/cm4research/tree/master/script/reproduce-ieee-acm-micro2023-paper-22) + - CM script: [reproduce-ieee-acm-micro2023-paper-28](https://github.com/ctuning/cm4research/tree/master/script/reproduce-ieee-acm-micro2023-paper-28) + - CM script: [reproduce-ieee-acm-micro2023-paper-33](https://github.com/ctuning/cm4research/tree/master/script/reproduce-ieee-acm-micro2023-paper-33) + - CM script: [reproduce-ieee-acm-micro2023-paper-38](https://github.com/ctuning/cm4research/tree/master/script/reproduce-ieee-acm-micro2023-paper-38) + - CM script: [reproduce-ieee-acm-micro2023-paper-5](https://github.com/ctuning/cm4research/tree/master/script/reproduce-ieee-acm-micro2023-paper-5) + - CM script: [reproduce-ieee-acm-micro2023-paper-8](https://github.com/ctuning/cm4research/tree/master/script/reproduce-ieee-acm-micro2023-paper-8) + - CM script: [reproduce-ieee-acm-micro2023-paper-85](https://github.com/ctuning/cm4research/tree/master/script/reproduce-ieee-acm-micro2023-paper-85) + - CM script: [reproduce-ieee-acm-micro2023-paper-87](https://github.com/ctuning/cm4research/tree/master/script/reproduce-ieee-acm-micro2023-paper-87) + - CM script: 
[reproduce-ieee-acm-micro2023-paper-96](https://github.com/ctuning/cm4research/tree/master/script/reproduce-ieee-acm-micro2023-paper-96) + - CM script: [reproduce-ipol-paper-2022-439a](https://github.com/ctuning/cm4research/tree/master/script/reproduce-ipol-paper-2022-439a) + - CM script: [reproduce-neurips-paper-2022-arxiv-2204.09656](https://github.com/ctuning/cm4research/tree/master/script/reproduce-neurips-paper-2022-arxiv-2204.09656) + - CM script: [process-mlperf-inference-results](https://github.com/mlcommons/cm4mlperf-results/tree/master/script/process-mlperf-inference-results) + - CM script: [get-target-device](https://github.com/cknowledge/cm4research/tree/master/script/get-target-device) + - CM script: [run-refiners-hello-world](https://github.com/cknowledge/cm4research/tree/master/script/run-refiners-hello-world) + - CM script: [test-mlperf-loadgen-cpp](https://github.com/cknowledge/cm4research/tree/master/script/test-mlperf-loadgen-cpp) + - CM script: [test-mlperf-loadgen-py](https://github.com/cknowledge/cm4research/tree/master/script/test-mlperf-loadgen-py) + - CM script: [test-onnxruntime-cpp](https://github.com/cknowledge/cm4research/tree/master/script/test-onnxruntime-cpp) + - CM script: [app-generate-image-dalle-mini-jax-py](https://github.com/cknowledge/cm4research-private/tree/master/script/app-generate-image-dalle-mini-jax-py) + - CM script: [app-generate-image-stable-diffusion2-pytorch-cuda-py](https://github.com/cknowledge/cm4research-private/tree/master/script/app-generate-image-stable-diffusion2-pytorch-cuda-py) + - CM script: [app-image-classification-onnx-py-ck](https://github.com/cknowledge/cm4research-private/tree/master/script/app-image-classification-onnx-py-ck) + - CM script: [app-image-corner-detection-old](https://github.com/cknowledge/cm4research-private/tree/master/script/app-image-corner-detection-old) + - CM script: 
[app-mlperf-inference-nvidia](https://github.com/cknowledge/cm4research-private/tree/master/script/app-mlperf-inference-nvidia) + - CM script: [app-stable-diffusion-pytorch-cuda-py](https://github.com/cknowledge/cm4research-private/tree/master/script/app-stable-diffusion-pytorch-cuda-py) + - CM script: [get-axs](https://github.com/cknowledge/cm4research-private/tree/master/script/get-axs) + - CM script: [gui-llm](https://github.com/cknowledge/cm4research-private/tree/master/script/gui-llm) + - CM script: [not-needed--get-android-cmdline-tools](https://github.com/cknowledge/cm4research-private/tree/master/script/not-needed--get-android-cmdline-tools) + - CM script: [not-needed--install-android-cmdline-tools](https://github.com/cknowledge/cm4research-private/tree/master/script/not-needed--install-android-cmdline-tools) + - CM script: [test-script1](https://github.com/cknowledge/cm4research-private/tree/master/script/test-script1) + - CM script: [test-script2](https://github.com/cknowledge/cm4research-private/tree/master/script/test-script2) + - CM script: [test-script3](https://github.com/cknowledge/cm4research-private/tree/master/script/test-script3) + - CM script: [test-script4](https://github.com/cknowledge/cm4research-private/tree/master/script/test-script4) + - CM script: [test-script5](https://github.com/cknowledge/cm4research-private/tree/master/script/test-script5) + - CM script: [activate-python-venv](https://github.com/mlcommons/cm4mlops/tree/master/script/activate-python-venv) + - CM script: [add-custom-nvidia-system](https://github.com/mlcommons/cm4mlops/tree/master/script/add-custom-nvidia-system) + - CM script: [app-image-classification-onnx-py](https://github.com/mlcommons/cm4mlops/tree/master/script/app-image-classification-onnx-py) + - CM script: [app-image-classification-tf-onnx-cpp](https://github.com/mlcommons/cm4mlops/tree/master/script/app-image-classification-tf-onnx-cpp) + - CM script: 
[app-image-classification-torch-py](https://github.com/mlcommons/cm4mlops/tree/master/script/app-image-classification-torch-py) + - CM script: [app-image-classification-tvm-onnx-py](https://github.com/mlcommons/cm4mlops/tree/master/script/app-image-classification-tvm-onnx-py) + - CM script: [app-image-corner-detection](https://github.com/mlcommons/cm4mlops/tree/master/script/app-image-corner-detection) + - CM script: [app-loadgen-generic-python](https://github.com/mlcommons/cm4mlops/tree/master/script/app-loadgen-generic-python) + - CM script: [app-mlperf-inference](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference) + - CM script: [app-mlperf-inference-ctuning-cpp-tflite](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-ctuning-cpp-tflite) + - CM script: [app-mlperf-inference-dummy](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-dummy) + - CM script: [app-mlperf-inference-intel](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-intel) + - CM script: [app-mlperf-inference-mlcommons-cpp](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-mlcommons-cpp) + - CM script: [app-mlperf-inference-mlcommons-python](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-mlcommons-python) + - CM script: [app-mlperf-inference-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-nvidia) + - CM script: [app-mlperf-inference-qualcomm](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-qualcomm) + - CM script: [app-mlperf-training-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-training-nvidia) + - CM script: [app-mlperf-training-reference](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-training-reference) + - CM script: 
[app-stable-diffusion-onnx-py](https://github.com/mlcommons/cm4mlops/tree/master/script/app-stable-diffusion-onnx-py) + - CM script: [benchmark-any-mlperf-inference-implementation](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-any-mlperf-inference-implementation) + - CM script: [benchmark-program](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program) + - CM script: [benchmark-program-mlperf](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program-mlperf) + - CM script: [build-docker-image](https://github.com/mlcommons/cm4mlops/tree/master/script/build-docker-image) + - CM script: [build-dockerfile](https://github.com/mlcommons/cm4mlops/tree/master/script/build-dockerfile) + - CM script: [build-mlperf-inference-server-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/build-mlperf-inference-server-nvidia) + - CM script: [calibrate-model-for.qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/calibrate-model-for.qaic) + - CM script: [compile-model-for.qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/compile-model-for.qaic) + - CM script: [compile-program](https://github.com/mlcommons/cm4mlops/tree/master/script/compile-program) + - CM script: [convert-csv-to-md](https://github.com/mlcommons/cm4mlops/tree/master/script/convert-csv-to-md) + - CM script: [convert-ml-model-huggingface-to-onnx](https://github.com/mlcommons/cm4mlops/tree/master/script/convert-ml-model-huggingface-to-onnx) + - CM script: [copy-to-clipboard](https://github.com/mlcommons/cm4mlops/tree/master/script/copy-to-clipboard) + - CM script: [create-conda-env](https://github.com/mlcommons/cm4mlops/tree/master/script/create-conda-env) + - CM script: [create-fpgaconvnet-app-tinyml](https://github.com/mlcommons/cm4mlops/tree/master/script/create-fpgaconvnet-app-tinyml) + - CM script: 
[create-fpgaconvnet-config-tinyml](https://github.com/mlcommons/cm4mlops/tree/master/script/create-fpgaconvnet-config-tinyml) + - CM script: [create-patch](https://github.com/mlcommons/cm4mlops/tree/master/script/create-patch) + - CM script: [destroy-terraform](https://github.com/mlcommons/cm4mlops/tree/master/script/destroy-terraform) + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + - CM script: [detect-sudo](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-sudo) + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + - CM script: [download-torrent](https://github.com/mlcommons/cm4mlops/tree/master/script/download-torrent) + - CM script: [dump-pip-freeze](https://github.com/mlcommons/cm4mlops/tree/master/script/dump-pip-freeze) + - CM script: [extract-file](https://github.com/mlcommons/cm4mlops/tree/master/script/extract-file) + - CM script: [fail](https://github.com/mlcommons/cm4mlops/tree/master/script/fail) + - CM script: [flash-tinyml-binary](https://github.com/mlcommons/cm4mlops/tree/master/script/flash-tinyml-binary) + - CM script: [generate-mlperf-inference-submission](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-inference-submission) + - CM script: [generate-mlperf-inference-user-conf](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-inference-user-conf) + - CM script: [generate-mlperf-tiny-report](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-tiny-report) + - CM script: [generate-mlperf-tiny-submission](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-tiny-submission) + - CM script: 
[generate-nvidia-engine](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-nvidia-engine) + - CM script: [get-android-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-android-sdk) + - CM script: [get-aocl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-aocl) + - CM script: [get-aria2](https://github.com/mlcommons/cm4mlops/tree/master/script/get-aria2) + - CM script: [get-aws-cli](https://github.com/mlcommons/cm4mlops/tree/master/script/get-aws-cli) + - CM script: [get-bazel](https://github.com/mlcommons/cm4mlops/tree/master/script/get-bazel) + - CM script: [get-bert-squad-vocab](https://github.com/mlcommons/cm4mlops/tree/master/script/get-bert-squad-vocab) + - CM script: [get-blis](https://github.com/mlcommons/cm4mlops/tree/master/script/get-blis) + - CM script: [get-brew](https://github.com/mlcommons/cm4mlops/tree/master/script/get-brew) + - CM script: [get-ck](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ck) + - CM script: [get-ck-repo-mlops](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ck-repo-mlops) + - CM script: [get-cl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cl) + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + - CM script: [get-cmsis_5](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmsis_5) + - CM script: [get-compiler-flags](https://github.com/mlcommons/cm4mlops/tree/master/script/get-compiler-flags) + - CM script: [get-compiler-rust](https://github.com/mlcommons/cm4mlops/tree/master/script/get-compiler-rust) + - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) + - CM script: [get-croissant](https://github.com/mlcommons/cm4mlops/tree/master/script/get-croissant) + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + - CM script: 
[get-cuda-devices](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda-devices) + - CM script: [get-cudnn](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cudnn) + - CM script: [get-dataset-cifar10](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-cifar10) + - CM script: [get-dataset-cnndm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-cnndm) + - CM script: [get-dataset-coco](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-coco) + - CM script: [get-dataset-coco2014](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-coco2014) + - CM script: [get-dataset-criteo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-criteo) + - CM script: [get-dataset-imagenet-aux](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-aux) + - CM script: [get-dataset-imagenet-calibration](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-calibration) + - CM script: [get-dataset-imagenet-helper](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-helper) + - CM script: [get-dataset-imagenet-train](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-train) + - CM script: [get-dataset-imagenet-val](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-val) + - CM script: [get-dataset-kits19](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-kits19) + - CM script: [get-dataset-librispeech](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-librispeech) + - CM script: [get-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages) + - CM script: [get-dataset-openimages-annotations](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages-annotations) + - CM script: 
[get-dataset-openimages-calibration](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages-calibration) + - CM script: [get-dataset-openorca](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openorca) + - CM script: [get-dataset-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad) + - CM script: [get-dataset-squad-vocab](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad-vocab) + - CM script: [get-dlrm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dlrm) + - CM script: [get-dlrm-data-mlperf-inference](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dlrm-data-mlperf-inference) + - CM script: [get-docker](https://github.com/mlcommons/cm4mlops/tree/master/script/get-docker) + - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + - CM script: [get-github-cli](https://github.com/mlcommons/cm4mlops/tree/master/script/get-github-cli) + - CM script: [get-go](https://github.com/mlcommons/cm4mlops/tree/master/script/get-go) + - CM script: [get-google-saxml](https://github.com/mlcommons/cm4mlops/tree/master/script/get-google-saxml) + - CM script: [get-google-test](https://github.com/mlcommons/cm4mlops/tree/master/script/get-google-test) + - CM script: [get-ipol-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ipol-src) + - CM script: [get-java](https://github.com/mlcommons/cm4mlops/tree/master/script/get-java) + - CM script: [get-javac](https://github.com/mlcommons/cm4mlops/tree/master/script/get-javac) + - CM script: 
[get-lib-armnn](https://github.com/mlcommons/cm4mlops/tree/master/script/get-lib-armnn) + - CM script: [get-lib-dnnl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-lib-dnnl) + - CM script: [get-lib-protobuf](https://github.com/mlcommons/cm4mlops/tree/master/script/get-lib-protobuf) + - CM script: [get-lib-qaic-api](https://github.com/mlcommons/cm4mlops/tree/master/script/get-lib-qaic-api) + - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) + - CM script: [get-microtvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-microtvm) + - CM script: [get-ml-model-3d-unet-kits19](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-3d-unet-kits19) + - CM script: [get-ml-model-bert-base-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-base-squad) + - CM script: [get-ml-model-bert-large-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-large-squad) + - CM script: [get-ml-model-dlrm-terabyte](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-dlrm-terabyte) + - CM script: [get-ml-model-efficientnet-lite](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-efficientnet-lite) + - CM script: [get-ml-model-gptj](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-gptj) + - CM script: [get-ml-model-huggingface-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-huggingface-zoo) + - CM script: [get-ml-model-llama2](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-llama2) + - CM script: [get-ml-model-mobilenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-mobilenet) + - CM script: [get-ml-model-neuralmagic-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-neuralmagic-zoo) + - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) + 
- CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) + - CM script: [get-ml-model-retinanet-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet-nvidia) + - CM script: [get-ml-model-rnnt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-rnnt) + - CM script: [get-ml-model-stable-diffusion](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-stable-diffusion) + - CM script: [get-ml-model-tiny-resnet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-tiny-resnet) + - CM script: [get-ml-model-using-imagenet-from-model-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-using-imagenet-from-model-zoo) + - CM script: [get-mlperf-inference-intel-scratch-space](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-intel-scratch-space) + - CM script: [get-mlperf-inference-loadgen](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-loadgen) + - CM script: [get-mlperf-inference-nvidia-common-code](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-nvidia-common-code) + - CM script: [get-mlperf-inference-nvidia-scratch-space](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-nvidia-scratch-space) + - CM script: [get-mlperf-inference-results](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-results) + - CM script: [get-mlperf-inference-results-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-results-dir) + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + - CM script: [get-mlperf-inference-submission-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-submission-dir) + - CM script: 
[get-mlperf-inference-sut-configs](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-sut-configs) + - CM script: [get-mlperf-inference-sut-description](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-sut-description) + - CM script: [get-mlperf-inference-utils](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-utils) + - CM script: [get-mlperf-logging](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-logging) + - CM script: [get-mlperf-power-dev](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-power-dev) + - CM script: [get-mlperf-tiny-eembc-energy-runner-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-tiny-eembc-energy-runner-src) + - CM script: [get-mlperf-tiny-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-tiny-src) + - CM script: [get-mlperf-training-nvidia-code](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-training-nvidia-code) + - CM script: [get-mlperf-training-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-training-src) + - CM script: [get-nvidia-docker](https://github.com/mlcommons/cm4mlops/tree/master/script/get-nvidia-docker) + - CM script: [get-nvidia-mitten](https://github.com/mlcommons/cm4mlops/tree/master/script/get-nvidia-mitten) + - CM script: [get-onnxruntime-prebuilt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-onnxruntime-prebuilt) + - CM script: [get-openssl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-openssl) + - CM script: [get-preprocessed-dataset-criteo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-criteo) + - CM script: [get-preprocesser-script-generic](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocesser-script-generic) + - CM script: 
[get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) + - CM script: [get-preprocessed-dataset-kits19](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-kits19) + - CM script: [get-preprocessed-dataset-librispeech](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-librispeech) + - CM script: [get-preprocessed-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-openimages) + - CM script: [get-preprocessed-dataset-openorca](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-openorca) + - CM script: [get-preprocessed-dataset-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-squad) + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + - CM script: [get-qaic-apps-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-qaic-apps-sdk) + - CM script: [get-qaic-platform-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-qaic-platform-sdk) + - CM script: [get-qaic-software-kit](https://github.com/mlcommons/cm4mlops/tree/master/script/get-qaic-software-kit) + - CM script: [get-rclone](https://github.com/mlcommons/cm4mlops/tree/master/script/get-rclone) + - CM script: [get-rocm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-rocm) + - CM script: [get-spec-ptd](https://github.com/mlcommons/cm4mlops/tree/master/script/get-spec-ptd) + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + - CM script: [get-sys-utils-min](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-min) + - CM script: [get-tensorrt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tensorrt) + - CM script: 
[get-terraform](https://github.com/mlcommons/cm4mlops/tree/master/script/get-terraform) + - CM script: [get-tvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tvm) + - CM script: [get-tvm-model](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tvm-model) + - CM script: [get-xilinx-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-xilinx-sdk) + - CM script: [get-zendnn](https://github.com/mlcommons/cm4mlops/tree/master/script/get-zendnn) + - CM script: [get-zephyr](https://github.com/mlcommons/cm4mlops/tree/master/script/get-zephyr) + - CM script: [get-zephyr-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-zephyr-sdk) + - CM script: [gui](https://github.com/mlcommons/cm4mlops/tree/master/script/gui) + - CM script: [import-mlperf-inference-to-experiment](https://github.com/mlcommons/cm4mlops/tree/master/script/import-mlperf-inference-to-experiment) + - CM script: [import-mlperf-tiny-to-experiment](https://github.com/mlcommons/cm4mlops/tree/master/script/import-mlperf-tiny-to-experiment) + - CM script: [import-mlperf-training-to-experiment](https://github.com/mlcommons/cm4mlops/tree/master/script/import-mlperf-training-to-experiment) + - CM script: [install-aws-cli](https://github.com/mlcommons/cm4mlops/tree/master/script/install-aws-cli) + - CM script: [install-bazel](https://github.com/mlcommons/cm4mlops/tree/master/script/install-bazel) + - CM script: [install-cmake-prebuilt](https://github.com/mlcommons/cm4mlops/tree/master/script/install-cmake-prebuilt) + - CM script: [install-cuda-package-manager](https://github.com/mlcommons/cm4mlops/tree/master/script/install-cuda-package-manager) + - CM script: [install-cuda-prebuilt](https://github.com/mlcommons/cm4mlops/tree/master/script/install-cuda-prebuilt) + - CM script: [install-gcc-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-gcc-src) + - CM script: 
[install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + - CM script: [install-gflags](https://github.com/mlcommons/cm4mlops/tree/master/script/install-gflags) + - CM script: [install-github-cli](https://github.com/mlcommons/cm4mlops/tree/master/script/install-github-cli) + - CM script: [install-ipex-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-ipex-from-src) + - CM script: [install-llvm-prebuilt](https://github.com/mlcommons/cm4mlops/tree/master/script/install-llvm-prebuilt) + - CM script: [install-llvm-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-llvm-src) + - CM script: [install-mlperf-logging-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-mlperf-logging-from-src) + - CM script: [install-nccl-libs](https://github.com/mlcommons/cm4mlops/tree/master/script/install-nccl-libs) + - CM script: [install-numactl-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-numactl-from-src) + - CM script: [install-onednn-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-onednn-from-src) + - CM script: [install-onnxruntime-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-onnxruntime-from-src) + - CM script: [install-openssl](https://github.com/mlcommons/cm4mlops/tree/master/script/install-openssl) + - CM script: [install-pip-package-for-cmind-python](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pip-package-for-cmind-python) + - CM script: [install-python-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-python-src) + - CM script: [install-python-venv](https://github.com/mlcommons/cm4mlops/tree/master/script/install-python-venv) + - CM script: [install-pytorch-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pytorch-from-src) + - CM script: 
[install-pytorch-kineto-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pytorch-kineto-from-src) + - CM script: [install-qaic-compute-sdk-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-qaic-compute-sdk-from-src) + - CM script: [install-rocm](https://github.com/mlcommons/cm4mlops/tree/master/script/install-rocm) + - CM script: [install-tensorflow-for-c](https://github.com/mlcommons/cm4mlops/tree/master/script/install-tensorflow-for-c) + - CM script: [install-tensorflow-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-tensorflow-from-src) + - CM script: [install-terraform-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-terraform-from-src) + - CM script: [install-tflite-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-tflite-from-src) + - CM script: [install-torchvision-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-torchvision-from-src) + - CM script: [install-tpp-pytorch-extension](https://github.com/mlcommons/cm4mlops/tree/master/script/install-tpp-pytorch-extension) + - CM script: [install-transformers-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-transformers-from-src) + - CM script: [launch-benchmark](https://github.com/mlcommons/cm4mlops/tree/master/script/launch-benchmark) + - CM script: [prepare-training-data-bert](https://github.com/mlcommons/cm4mlops/tree/master/script/prepare-training-data-bert) + - CM script: [prepare-training-data-resnet](https://github.com/mlcommons/cm4mlops/tree/master/script/prepare-training-data-resnet) + - CM script: [preprocess-mlperf-inference-submission](https://github.com/mlcommons/cm4mlops/tree/master/script/preprocess-mlperf-inference-submission) + - CM script: [print-croissant-desc](https://github.com/mlcommons/cm4mlops/tree/master/script/print-croissant-desc) + - CM script: 
[print-hello-world](https://github.com/mlcommons/cm4mlops/tree/master/script/print-hello-world) + - CM script: [print-hello-world-java](https://github.com/mlcommons/cm4mlops/tree/master/script/print-hello-world-java) + - CM script: [print-hello-world-javac](https://github.com/mlcommons/cm4mlops/tree/master/script/print-hello-world-javac) + - CM script: [print-hello-world-py](https://github.com/mlcommons/cm4mlops/tree/master/script/print-hello-world-py) + - CM script: [print-python-version](https://github.com/mlcommons/cm4mlops/tree/master/script/print-python-version) + - CM script: [process-ae-users](https://github.com/mlcommons/cm4mlops/tree/master/script/process-ae-users) + - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) + - CM script: [prune-bert-models](https://github.com/mlcommons/cm4mlops/tree/master/script/prune-bert-models) + - CM script: [prune-docker](https://github.com/mlcommons/cm4mlops/tree/master/script/prune-docker) + - CM script: [publish-results-to-dashboard](https://github.com/mlcommons/cm4mlops/tree/master/script/publish-results-to-dashboard) + - CM script: [pull-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/pull-git-repo) + - CM script: [push-csv-to-spreadsheet](https://github.com/mlcommons/cm4mlops/tree/master/script/push-csv-to-spreadsheet) + - CM script: [push-mlperf-inference-results-to-github](https://github.com/mlcommons/cm4mlops/tree/master/script/push-mlperf-inference-results-to-github) + - CM script: [remote-run-commands](https://github.com/mlcommons/cm4mlops/tree/master/script/remote-run-commands) + - CM script: [reproduce-ipol-paper-2022-439](https://github.com/mlcommons/cm4mlops/tree/master/script/reproduce-ipol-paper-2022-439) + - CM script: [reproduce-mlperf-octoml-tinyml-results](https://github.com/mlcommons/cm4mlops/tree/master/script/reproduce-mlperf-octoml-tinyml-results) + - CM script: 
[reproduce-mlperf-training-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/reproduce-mlperf-training-nvidia) + - CM script: [run-docker-container](https://github.com/mlcommons/cm4mlops/tree/master/script/run-docker-container) + - CM script: [run-mlperf-inference-app](https://github.com/mlcommons/cm4mlops/tree/master/script/run-mlperf-inference-app) + - CM script: [run-mlperf-inference-mobilenet-models](https://github.com/mlcommons/cm4mlops/tree/master/script/run-mlperf-inference-mobilenet-models) + - CM script: [run-mlperf-inference-submission-checker](https://github.com/mlcommons/cm4mlops/tree/master/script/run-mlperf-inference-submission-checker) + - CM script: [run-mlperf-power-client](https://github.com/mlcommons/cm4mlops/tree/master/script/run-mlperf-power-client) + - CM script: [run-mlperf-power-server](https://github.com/mlcommons/cm4mlops/tree/master/script/run-mlperf-power-server) + - CM script: [run-mlperf-training-submission-checker](https://github.com/mlcommons/cm4mlops/tree/master/script/run-mlperf-training-submission-checker) + - CM script: [run-python](https://github.com/mlcommons/cm4mlops/tree/master/script/run-python) + - CM script: [run-terraform](https://github.com/mlcommons/cm4mlops/tree/master/script/run-terraform) + - CM script: [save-mlperf-inference-implementation-state](https://github.com/mlcommons/cm4mlops/tree/master/script/save-mlperf-inference-implementation-state) + - CM script: [set-device-settings-qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/set-device-settings-qaic) + - CM script: [set-echo-off-win](https://github.com/mlcommons/cm4mlops/tree/master/script/set-echo-off-win) + - CM script: [set-performance-mode](https://github.com/mlcommons/cm4mlops/tree/master/script/set-performance-mode) + - CM script: [set-sqlite-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/set-sqlite-dir) + - CM script: [set-venv](https://github.com/mlcommons/cm4mlops/tree/master/script/set-venv) + - CM script: 
[tar-my-folder](https://github.com/mlcommons/cm4mlops/tree/master/script/tar-my-folder) + - CM script: [test-deps-conditions](https://github.com/mlcommons/cm4mlops/tree/master/script/test-deps-conditions) + - CM script: [test-download-and-extract-artifacts](https://github.com/mlcommons/cm4mlops/tree/master/script/test-download-and-extract-artifacts) + - CM script: [test-mlperf-inference-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/test-mlperf-inference-retinanet) + - CM script: [test-set-sys-user-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/test-set-sys-user-cm) + - CM script: [truncate-mlperf-inference-accuracy-log](https://github.com/mlcommons/cm4mlops/tree/master/script/truncate-mlperf-inference-accuracy-log) + - CM script: [upgrade-python-pip](https://github.com/mlcommons/cm4mlops/tree/master/script/upgrade-python-pip) + - CM script: [wrapper-reproduce-octoml-tinyml-submission](https://github.com/mlcommons/cm4mlops/tree/master/script/wrapper-reproduce-octoml-tinyml-submission) + +___ +### Script output +`cmr "generate submission mlperf mlperf-tiny tiny mlcommons tiny-submission mlperf-tiny-submission mlcommons-tiny-submission " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/generate-nvidia-engine.md b/docs/MLPerf-benchmark-support/generate-nvidia-engine.md new file mode 100644 index 0000000000..ca8540fe6f --- /dev/null +++ b/docs/MLPerf-benchmark-support/generate-nvidia-engine.md @@ -0,0 +1,244 @@ +Automatically generated README for this automation recipe: **generate-nvidia-engine** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=generate-nvidia-engine,0eef9f05b272401f) ]* + +--- + +This CM script is in draft stage + + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-nvidia-engine)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *generate,engine,mlperf,inference,nvidia* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "generate engine mlperf inference nvidia" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=generate,engine,mlperf,inference,nvidia` + +`cm run script --tags=generate,engine,mlperf,inference,nvidia[,variations] [--input_flags]` + +*or* + +`cmr "generate engine mlperf inference nvidia"` + +`cmr "generate engine mlperf inference nvidia [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'generate,engine,mlperf,inference,nvidia', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="generate,engine,mlperf,inference,nvidia"``` + +#### Run this script via Docker (beta) + +`cm docker script "generate engine mlperf inference nvidia[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_batch_size.#` + - Environment variables: + - *CM_MODEL_BATCH_SIZE*: `None` + - Workflow: + * `_copy_streams.#` + - Environment variables: + - *CM_GPU_COPY_STREAMS*: `None` + - Workflow: + * `_cuda` + - Environment variables: + - *CM_MLPERF_DEVICE*: `gpu` + - *CM_MLPERF_DEVICE_LIB_NAMESPEC*: `cudart` + - Workflow: + +
+ + + * Group "**device**" +
+ Click here to expand this section. + + * **`_cpu`** (default) + - Environment variables: + - *CM_MLPERF_DEVICE*: `cpu` + - Workflow: + +
+ + + * Group "**model**" +
+ Click here to expand this section. + + * **`_resnet50`** (default) + - Environment variables: + - *CM_MODEL*: `resnet50` + - Workflow: + * `_retinanet` + - Environment variables: + - *CM_MODEL*: `retinanet` + - Workflow: + +
+ + +#### Default variations + +`_cpu,_resnet50` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--output_dir=value` → `CM_MLPERF_OUTPUT_DIR=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "output_dir":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_BATCH_COUNT: `1` +* CM_BATCH_SIZE: `1` +* CM_LOADGEN_SCENARIO: `Offline` +* CM_GPU_COPY_STREAMS: `1` +* CM_TENSORRT_WORKSPACE_SIZE: `4194304` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-nvidia-engine/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,cuda,_cudnn + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * get,tensorrt + - CM script: [get-tensorrt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tensorrt) + * get,generic-python-lib,_numpy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_pycuda + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,loadgen + * CM names: `--adr.['loadgen']...` + - CM script: [get-mlperf-inference-loadgen](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-loadgen) + * get,mlcommons,inference,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,nvidia,mlperf,inference,common-code + * CM names: `--adr.['nvidia-inference-common-code']...` + - CM script: [get-mlperf-inference-nvidia-common-code](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-nvidia-common-code) + * get,dataset,preprocessed,imagenet,_NCHW + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['resnet50']}` + * CM names: `--adr.['imagenet-preprocessed']...` + - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) + * get,ml-model,resnet50,_onnx + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['resnet50']}` + * CM names: `--adr.['ml-model', 'resnet50-model']...` + - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) + * get,dataset,preprocessed,openimages,_validation,_NCHW + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet']}` + * CM names: `--adr.['openimages-preprocessed']...` + - CM script: [get-preprocessed-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-openimages) + * get,ml-model,retinanet,_onnx,_fp32 + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet']}` + * CM names: `--adr.['ml-model', 'retinanet-model']...` + - CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-nvidia-engine/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-nvidia-engine/_cm.yaml) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-nvidia-engine/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-nvidia-engine/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-nvidia-engine/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-nvidia-engine/_cm.yaml) + +___ +### Script output +`cmr "generate engine mlperf inference nvidia [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_DATASET_*` +* `CM_MLPERF_*` +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-intel-scratch-space.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-intel-scratch-space.md new file mode 100644 index 0000000000..64717e3ce7 --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-mlperf-inference-intel-scratch-space.md @@ -0,0 +1,161 @@ +Automatically generated README for this automation recipe: **get-mlperf-inference-intel-scratch-space** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-inference-intel-scratch-space,e83fca30851f45ef) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-intel-scratch-space)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,mlperf,inference,intel,scratch,space* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get mlperf inference intel scratch space" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,mlperf,inference,intel,scratch,space` + +`cm run script --tags=get,mlperf,inference,intel,scratch,space[,variations] [--input_flags]` + +*or* + +`cmr "get mlperf inference intel scratch space"` + +`cmr "get mlperf inference intel scratch space [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,mlperf,inference,intel,scratch,space', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,mlperf,inference,intel,scratch,space"``` + +#### Run this script via Docker (beta) + +`cm docker script "get mlperf inference intel scratch space[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * Group "**version**" +
+ Click here to expand this section. + + * `_version.#` + - Environment variables: + - *CM_INTEL_SCRATCH_SPACE_VERSION*: `#` + - Workflow: + * **`_version.4_0`** (default) + - Environment variables: + - *CM_INTEL_SCRATCH_SPACE_VERSION*: `4_0` + - Workflow: + +
+ + +#### Default variations + +`_version.4_0` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--scratch_path=value` → `MLPERF_INTEL_SCRATCH_PATH=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "scratch_path":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-intel-scratch-space/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-intel-scratch-space/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-intel-scratch-space/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-intel-scratch-space/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-intel-scratch-space/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-intel-scratch-space/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-intel-scratch-space/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-intel-scratch-space/_cm.json) + +___ +### Script output +`cmr "get mlperf inference intel scratch space [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_INTEL_MLPERF_SCRATCH_PATH` +* `CM_INTEL_SCRATCH_SPACE_VERSION` +#### New environment keys auto-detected from customize + +* `CM_INTEL_MLPERF_SCRATCH_PATH` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-loadgen.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-loadgen.md new file mode 100644 index 0000000000..979a870b09 --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-mlperf-inference-loadgen.md @@ -0,0 +1,224 @@ +Automatically generated README for this automation recipe: **get-mlperf-inference-loadgen** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-inference-loadgen,64c3d98d0ba04950) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-loadgen)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *get,loadgen,inference,inference-loadgen,mlperf,mlcommons* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get loadgen inference inference-loadgen mlperf mlcommons" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,loadgen,inference,inference-loadgen,mlperf,mlcommons` + +`cm run script --tags=get,loadgen,inference,inference-loadgen,mlperf,mlcommons[,variations] ` + +*or* + +`cmr "get loadgen inference inference-loadgen mlperf mlcommons"` + +`cmr "get loadgen inference inference-loadgen mlperf mlcommons [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,loadgen,inference,inference-loadgen,mlperf,mlcommons', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,loadgen,inference,inference-loadgen,mlperf,mlcommons"``` + +#### Run this script via Docker (beta) + +`cm docker script "get loadgen inference inference-loadgen mlperf mlcommons[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_copy` + - Workflow: + * `_custom-python` + - Environment variables: + - *CM_TMP_USE_CUSTOM_PYTHON*: `on` + - Workflow: + * `_download` + - Environment variables: + - *CM_DOWNLOAD_CHECKSUM*: `af3f9525965b2c1acc348fb882a5bfd1` + - *CM_MLPERF_INFERENCE_LOADGEN_DOWNLOAD*: `YES` + - *CM_MLPERF_INFERENCE_LOADGEN_DOWNLOAD_URL*: `https://www.dropbox.com/scl/fi/36dgoiur26i2tvwgsaatf/loadgen.zip?rlkey=ab68i7uza9anvaw0hk1xvf0qk&dl=0` + - *CM_MLPERF_INFERENCE_LOADGEN_VERSION*: `v3.1` + - *CM_VERIFY_SSL*: `False` + - Workflow: + * `_download_v3.1` + - Environment variables: + - *CM_DOWNLOAD_CHECKSUM*: `af3f9525965b2c1acc348fb882a5bfd1` + - *CM_MLPERF_INFERENCE_LOADGEN_DOWNLOAD*: `YES` + - *CM_MLPERF_INFERENCE_LOADGEN_DOWNLOAD_URL*: `https://www.dropbox.com/scl/fi/36dgoiur26i2tvwgsaatf/loadgen.zip?rlkey=ab68i7uza9anvaw0hk1xvf0qk&dl=0` + - *CM_MLPERF_INFERENCE_LOADGEN_VERSION*: `v3.1` + - *CM_VERIFY_SSL*: `False` + - Workflow: + * `_download_v4.0` + - Environment variables: + - *CM_DOWNLOAD_CHECKSUM*: `b4d97525d9ad0539a64667f2a3ca20c5` + - *CM_MLPERF_INFERENCE_LOADGEN_DOWNLOAD*: `YES` + - *CM_MLPERF_INFERENCE_LOADGEN_DOWNLOAD_URL*: `https://www.dropbox.com/scl/fi/gk5e9kziju5t56umxyzyx/loadgen.zip?rlkey=vsie4xnzml1inpjplm5cg7t54&dl=0` + - *CM_MLPERF_INFERENCE_LOADGEN_VERSION*: `v4.0` + - *CM_VERIFY_SSL*: `False` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_SHARED_BUILD: `no` + +
+ +#### Versions +Default version: `master` + +* `custom` +* `main` +* `master` +* `pybind_fix` +* `r2.1` +* `r3.0` +* `r3.1` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-loadgen/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,mlcommons,inference,src + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_INFERENCE_LOADGEN_DOWNLOAD': ['YES']}` + * CM names: `--adr.['inference-src-loadgen']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * download-and-extract,file,_wget,_extract + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_INFERENCE_LOADGEN_DOWNLOAD': ['YES']}` + * CM names: `--adr.['inference-src-loadgen-download']...` + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + * get,compiler + * Skip this dependency only if all ENV vars are set:
+`{'CM_HOST_OS_TYPE': ['windows']}` + * CM names: `--adr.['compiler']...` + - CM script: [get-cl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cl) + - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) + - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) + * get,cl + * Enable this dependency only if all ENV vars are set:
+`{'CM_HOST_OS_TYPE': ['windows']}` + * CM names: `--adr.['compiler']...` + - CM script: [get-cl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cl) + * get,cmake + * CM names: `--adr.['cmake']...` + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + * get,generic-python-lib,_package.wheel + * CM names: `--adr.['pip-package', 'wheel']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_pip + * CM names: `--adr.['pip-package', 'pip']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.pybind11 + * CM names: `--adr.['pip-package', 'pybind11']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.setuptools + * CM names: `--adr.['pip-package', 'setuputils']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-loadgen/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-loadgen/_cm.yaml) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-loadgen/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-loadgen/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-loadgen/_cm.yaml) + 1. 
***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-loadgen/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-loadgen/_cm.yaml) + +___ +### Script output +`cmr "get loadgen inference inference-loadgen mlperf mlcommons [,variations]" -j` +#### New environment keys (filter) + +* `+CPLUS_INCLUDE_PATH` +* `+C_INCLUDE_PATH` +* `+DYLD_FALLBACK_LIBRARY_PATH` +* `+LD_LIBRARY_PATH` +* `+PYTHONPATH` +* `CM_MLPERF_INFERENCE_LOADGEN_*` +#### New environment keys auto-detected from customize + +* `CM_MLPERF_INFERENCE_LOADGEN_INCLUDE_PATH` +* `CM_MLPERF_INFERENCE_LOADGEN_INSTALL_PATH` +* `CM_MLPERF_INFERENCE_LOADGEN_LIBRARY_PATH` +* `CM_MLPERF_INFERENCE_LOADGEN_PYTHON_PATH` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-nvidia-common-code.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-nvidia-common-code.md new file mode 100644 index 0000000000..81faf43e82 --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-mlperf-inference-nvidia-common-code.md @@ -0,0 +1,150 @@ +Automatically generated README for this automation recipe: **get-mlperf-inference-nvidia-common-code** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-inference-nvidia-common-code,26b78bf3ffdc4926) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-common-code)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,nvidia,mlperf,inference,common-code* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get nvidia mlperf inference common-code" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,nvidia,mlperf,inference,common-code` + +`cm run script --tags=get,nvidia,mlperf,inference,common-code[,variations] ` + +*or* + +`cmr "get nvidia mlperf inference common-code"` + +`cmr "get nvidia mlperf inference common-code [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,nvidia,mlperf,inference,common-code', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,nvidia,mlperf,inference,common-code"``` + +#### Run this script via Docker (beta) + +`cm docker script "get nvidia mlperf inference common-code[variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**repo-owner**" +
+ Click here to expand this section. + + * `_ctuning` + - Workflow: + * `_custom` + - Workflow: + * `_mlcommons` + - Workflow: + * `_nvidia-only` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `r3.1` + +* `r2.1` +* `r3.0` +* `r3.1` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-common-code/_cm.json)*** + * get,mlperf,inference,results + * CM names: `--adr.['mlperf-inference-results']...` + - CM script: [get-mlperf-inference-results](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-results) + - CM script: [get-mlperf-inference-results-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-results-dir) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-common-code/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-common-code/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-common-code/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-common-code/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-common-code/_cm.json) + +___ +### Script output +`cmr "get nvidia mlperf inference common-code [,variations]" -j` +#### New environment keys (filter) + +* `+PYTHONPATH` +* `CM_MLPERF_INFERENCE_NVIDIA_CODE_PATH` +#### New environment keys auto-detected from customize + +* `CM_MLPERF_INFERENCE_NVIDIA_CODE_PATH` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-nvidia-scratch-space.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-nvidia-scratch-space.md new file mode 100644 index 0000000000..845f71038f --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-mlperf-inference-nvidia-scratch-space.md @@ -0,0 +1,162 @@ +Automatically generated README for this automation recipe: **get-mlperf-inference-nvidia-scratch-space** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-inference-nvidia-scratch-space,0b2bec8b29fb4ab7) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-scratch-space)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,mlperf,inference,nvidia,scratch,space* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get mlperf inference nvidia scratch space" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,mlperf,inference,nvidia,scratch,space` + +`cm run script --tags=get,mlperf,inference,nvidia,scratch,space[,variations] [--input_flags]` + +*or* + +`cmr "get mlperf inference nvidia scratch space"` + +`cmr "get mlperf inference nvidia scratch space [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,mlperf,inference,nvidia,scratch,space', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,mlperf,inference,nvidia,scratch,space"``` + +#### Run this script via Docker (beta) + +`cm docker script "get mlperf inference nvidia scratch space[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * Group "**version**" +
+ Click here to expand this section. + + * `_version.#` + - Environment variables: + - *CM_NVIDIA_SCRATCH_SPACE_VERSION*: `#` + - Workflow: + * **`_version.4_0`** (default) + - Environment variables: + - *CM_NVIDIA_SCRATCH_SPACE_VERSION*: `4_0` + - Workflow: + +
+ + +#### Default variations + +`_version.4_0` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--scratch_path=value` → `CM_NVIDIA_MLPERF_SCRATCH_PATH=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "scratch_path":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-scratch-space/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-scratch-space/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-scratch-space/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-scratch-space/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-scratch-space/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-scratch-space/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-scratch-space/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-scratch-space/_cm.json) + +___ +### Script output +`cmr "get mlperf inference nvidia scratch space [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_NVIDIA_MLPERF_SCRATCH_PATH` +* `CM_NVIDIA_SCRATCH_SPACE_VERSION` +* `MLPERF_SCRATCH_PATH` +#### New environment keys auto-detected from customize + +* `CM_NVIDIA_MLPERF_SCRATCH_PATH` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-results-dir.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-results-dir.md new file mode 100644 index 0000000000..13e9352091 --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-mlperf-inference-results-dir.md @@ -0,0 +1,159 @@ +Automatically generated README for this automation recipe: **get-mlperf-inference-results-dir** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-inference-results-dir,84f3c5aad5e1444b) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results-dir)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,mlperf,inference,results,dir,directory* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get mlperf inference results dir directory" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,mlperf,inference,results,dir,directory` + +`cm run script --tags=get,mlperf,inference,results,dir,directory[,variations] [--input_flags]` + +*or* + +`cmr "get mlperf inference results dir directory"` + +`cmr "get mlperf inference results dir directory [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,mlperf,inference,results,dir,directory', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,mlperf,inference,results,dir,directory"``` + +#### Run this script via Docker (beta) + +`cm docker script "get mlperf inference results dir directory[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * Group "**version**" +
+ Click here to expand this section. + + * `_version.#` + - Environment variables: + - *CM_MLPERF_INFERENCE_RESULTS_VERSION*: `#` + - Workflow: + * **`_version.4_0`** (default) + - Environment variables: + - *CM_MLPERF_INFERENCE_RESULTS_VERSION*: `4_0` + - Workflow: + +
+ + +#### Default variations + +`_version.4_0` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--results_dir=value` → `CM_MLPERF_INFERENCE_RESULTS_DIR=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "results_dir":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results-dir/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results-dir/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results-dir/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results-dir/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results-dir/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results-dir/_cm.json) + +___ +### Script output +`cmr "get mlperf inference results dir directory [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_MLPERF_INFERENCE_RESULTS_DIR` +* `CM_MLPERF_INFERENCE_RESULTS_VERSION` +#### New environment keys auto-detected from customize + +* `CM_MLPERF_INFERENCE_RESULTS_DIR` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-results.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-results.md new file mode 100644 index 0000000000..75b92569e7 --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-mlperf-inference-results.md @@ -0,0 +1,163 @@ +Automatically generated README for this automation recipe: **get-mlperf-inference-results** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this 
CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-inference-results,36bae5b25dbe41da) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,results,inference,inference-results,mlcommons,mlperf* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get results inference inference-results mlcommons mlperf" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,results,inference,inference-results,mlcommons,mlperf` + +`cm run script --tags=get,results,inference,inference-results,mlcommons,mlperf[,variations] ` + +*or* + +`cmr "get results inference inference-results mlcommons mlperf"` + +`cmr "get results inference inference-results mlcommons mlperf [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,results,inference,inference-results,mlcommons,mlperf', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,results,inference,inference-results,mlcommons,mlperf"``` + +#### Run this script via Docker (beta) + +`cm docker script "get results inference inference-results mlcommons mlperf[variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**source-repo**" +
+ Click here to expand this section. + + * `_ctuning` + - Environment variables: + - *GITHUB_REPO_OWNER*: `ctuning` + - Workflow: + * `_custom` + - Environment variables: + - *GITHUB_REPO_OWNER*: `arjunsuresh` + - Workflow: + * **`_mlcommons`** (default) + - Environment variables: + - *GITHUB_REPO_OWNER*: `mlcommons` + - Workflow: + * `_nvidia-only` + - Environment variables: + - *GITHUB_REPO_OWNER*: `GATEOverflow` + - *NVIDIA_ONLY*: `yes` + - Workflow: + +
+ + +#### Default variations + +`_mlcommons` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_GIT_CHECKOUT: `master` +* CM_GIT_DEPTH: `--depth 1` +* CM_GIT_PATCH: `no` + +
+ +#### Versions +Default version: `v3.1` + +* `v2.1` +* `v3.0` +* `v3.1` +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results/_cm.json)*** + * get,git,repo + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results/_cm.json) + +___ +### Script output +`cmr "get results inference inference-results mlcommons mlperf [,variations]" -j` +#### New environment keys (filter) + +* `CM_MLPERF_INFERENCE_RESULTS_*` +#### New environment keys auto-detected from customize + +* `CM_MLPERF_INFERENCE_RESULTS_PATH` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-src.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-src.md new file mode 100644 index 0000000000..abc42a7a37 --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-mlperf-inference-src.md @@ -0,0 +1,266 @@ +Automatically generated README for this automation recipe: **get-mlperf-inference-src** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-inference-src,4b57186581024797) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,src,source,inference,inference-src,inference-source,mlperf,mlcommons* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get src source inference inference-src inference-source mlperf mlcommons" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,src,source,inference,inference-src,inference-source,mlperf,mlcommons` + +`cm run script --tags=get,src,source,inference,inference-src,inference-source,mlperf,mlcommons[,variations] ` + +*or* + +`cmr "get src source inference inference-src inference-source mlperf mlcommons"` + +`cmr "get src source inference inference-src inference-source mlperf mlcommons [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,src,source,inference,inference-src,inference-source,mlperf,mlcommons', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,src,source,inference,inference-src,inference-source,mlperf,mlcommons"``` + +#### Run this script via Docker (beta) + +`cm docker script "get src source inference inference-src inference-source mlperf mlcommons[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_3d-unet` + - Environment variables: + - *CM_SUBMODULE_3D_UNET*: `yes` + - Workflow: + * `_deeplearningexamples` + - Environment variables: + - *CM_SUBMODULE_DEEPLEARNINGEXAMPLES*: `yes` + - Workflow: + * `_deepsparse` + - Environment variables: + - *CM_GIT_CHECKOUT*: `deepsparse` + - *CM_GIT_URL*: `https://github.com/neuralmagic/inference` + - *CM_MLPERF_LAST_RELEASE*: `v4.0` + - Workflow: + * `_gn` + - Environment variables: + - *CM_SUBMODULE_GN*: `yes` + - Workflow: + * `_no-recurse-submodules` + - Environment variables: + - *CM_GIT_RECURSE_SUBMODULES*: `` + - Workflow: + * `_nvidia-pycocotools` + - Environment variables: + - *CM_GIT_PATCH_FILENAME*: `coco.patch` + - Workflow: + * `_octoml` + - Environment variables: + - *CM_GIT_URL*: `https://github.com/octoml/inference` + - Workflow: + * `_openimages-nvidia-pycocotools` + - Environment variables: + - *CM_GIT_PATCH_FILENAME*: `openimages-pycocotools.patch` + - Workflow: + * `_patch` + - Environment variables: + - *CM_GIT_PATCH*: `yes` + - Workflow: + * `_pybind` + - Environment variables: + - *CM_SUBMODULE_PYBIND*: `yes` + - Workflow: + * `_recurse-submodules` + - Environment variables: + - *CM_GIT_RECURSE_SUBMODULES*: ` --recurse-submodules` + - Workflow: + * `_repo.#` + - Environment variables: + - *CM_GIT_URL*: `#` + - Workflow: + * `_submodules.#` + - Environment variables: + - *CM_GIT_SUBMODULES*: `#` + - Workflow: + +
+ + + * Group "**checkout**" +
+ Click here to expand this section. + + * `_branch.#` + - Environment variables: + - *CM_GIT_CHECKOUT*: `#` + - Workflow: + * `_sha.#` + - Environment variables: + - *CM_GIT_SHA*: `#` + - Workflow: + +
+ + + * Group "**git-history**" +
+ Click here to expand this section. + + * `_full-history` + - Environment variables: + - *CM_GIT_DEPTH*: `` + - Workflow: + * **`_short-history`** (default) + - Environment variables: + - *CM_GIT_DEPTH*: `--depth 10` + - Workflow: + +
+ + +#### Default variations + +`_short-history` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_GIT_CHECKOUT_FOLDER: `inference` +* CM_GIT_DEPTH: `--depth 4` +* CM_GIT_PATCH: `no` +* CM_GIT_RECURSE_SUBMODULES: `` +* CM_GIT_URL: `https://github.com/mlcommons/inference.git` + +
+ +#### Versions +Default version: `master` + +* `custom` +* `deepsparse` +* `main` +* `master` +* `pybind_fix` +* `r2.1` +* `r3.0` +* `r3.1` +* `tvm` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-src/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-src/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-src/_cm.json)*** + * get,git,repo + * CM names: `--adr.['inference-git-repo']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-src/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-src/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-src/_cm.json) + +___ +### Script output +`cmr "get src source inference inference-src inference-source mlperf mlcommons [,variations]" -j` +#### New environment keys (filter) + +* `+PYTHONPATH` +* `CM_MLPERF_INFERENCE_3DUNET_PATH` +* `CM_MLPERF_INFERENCE_BERT_PATH` +* `CM_MLPERF_INFERENCE_CLASSIFICATION_AND_DETECTION_PATH` +* `CM_MLPERF_INFERENCE_CONF_PATH` +* `CM_MLPERF_INFERENCE_DLRM_PATH` +* `CM_MLPERF_INFERENCE_DLRM_V2_PATH` +* `CM_MLPERF_INFERENCE_GPTJ_PATH` +* `CM_MLPERF_INFERENCE_RNNT_PATH` +* `CM_MLPERF_INFERENCE_SOURCE` +* `CM_MLPERF_INFERENCE_VERSION` +* `CM_MLPERF_INFERENCE_VISION_PATH` +* `CM_MLPERF_LAST_RELEASE` +#### New environment keys auto-detected from customize + +* `CM_MLPERF_INFERENCE_3DUNET_PATH` +* `CM_MLPERF_INFERENCE_BERT_PATH` +* `CM_MLPERF_INFERENCE_CLASSIFICATION_AND_DETECTION_PATH` +* `CM_MLPERF_INFERENCE_CONF_PATH` +* `CM_MLPERF_INFERENCE_DLRM_PATH` +* `CM_MLPERF_INFERENCE_DLRM_V2_PATH` +* `CM_MLPERF_INFERENCE_GPTJ_PATH` +* `CM_MLPERF_INFERENCE_RNNT_PATH` +* `CM_MLPERF_INFERENCE_VISION_PATH` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-submission-dir.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-submission-dir.md new file mode 100644 index 0000000000..94a3aa684c --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-mlperf-inference-submission-dir.md @@ -0,0 +1,159 @@ +Automatically generated README for this automation recipe: **get-mlperf-inference-submission-dir** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-inference-submission-dir,ddf36a41d6934a7e) ]* + +--- +#### 
Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-submission-dir)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,mlperf,inference,submission,dir,directory* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get mlperf inference submission dir directory" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,mlperf,inference,submission,dir,directory` + +`cm run script --tags=get,mlperf,inference,submission,dir,directory[,variations] [--input_flags]` + +*or* + +`cmr "get mlperf inference submission dir directory"` + +`cmr "get mlperf inference submission dir directory [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,mlperf,inference,submission,dir,directory', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,mlperf,inference,submission,dir,directory"``` + +#### Run this script via Docker (beta) + +`cm docker script "get mlperf inference submission dir directory[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * Group "**version**" +
+ Click here to expand this section. + + * `_version.#` + - Environment variables: + - *CM_MLPERF_INFERENCE_SUBMISSION_VERSION*: `#` + - Workflow: + * **`_version.4_0`** (default) + - Environment variables: + - *CM_MLPERF_INFERENCE_SUBMISSION_VERSION*: `4_0` + - Workflow: + +
+ + +#### Default variations + +`_version.4_0` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--submission_dir=value` → `CM_MLPERF_INFERENCE_SUBMISSION_DIR=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "submission_dir":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-submission-dir/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-submission-dir/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-submission-dir/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-submission-dir/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-submission-dir/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-submission-dir/_cm.json) + +___ +### Script output +`cmr "get mlperf inference submission dir directory [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_MLPERF_INFERENCE_SUBMISSION_DIR` +* `CM_MLPERF_INFERENCE_SUBMISSION_VERSION` +#### New environment keys auto-detected from customize + +* `CM_MLPERF_INFERENCE_SUBMISSION_DIR` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-sut-configs.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-sut-configs.md new file mode 100644 index 0000000000..9d5a01f195 --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-mlperf-inference-sut-configs.md @@ -0,0 +1,161 @@ +Automatically generated README for this automation recipe: **get-mlperf-inference-sut-configs** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) 
+ +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-inference-sut-configs,c2fbf72009e2445b) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-configs)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,mlperf,inference,sut,configs,sut-configs* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get mlperf inference sut configs sut-configs" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,mlperf,inference,sut,configs,sut-configs` + +`cm run script --tags=get,mlperf,inference,sut,configs,sut-configs[,variations] [--input_flags]` + +*or* + +`cmr "get mlperf inference sut configs sut-configs"` + +`cmr "get mlperf inference sut configs sut-configs [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,mlperf,inference,sut,configs,sut-configs',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+   print (r['error'])
+
+```
+
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,mlperf,inference,sut,configs,sut-configs"``` + +#### Run this script via Docker (beta) + +`cm docker script "get mlperf inference sut configs sut-configs[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_octoml` + - Environment variables: + - *CM_SUT_USE_EXTERNAL_CONFIG_REPO*: `yes` + - *CM_GIT_CHECKOUT_FOLDER*: `configs` + - *CM_GIT_URL*: `https://github.com/arjunsuresh/mlperf-inference-configs` + - Workflow: + 1. ***Read "prehook_deps" on other CM scripts*** + * get,git,repo,_repo.mlperf_inference_configs_octoml + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section.
+
+* `--configs_git_url=value` → `CM_GIT_URL=value`
+* `--repo_path=value` → `CM_SUT_CONFIGS_PATH=value`
+* `--run_config=value` → `CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX=value`
+
+**Above CLI flags can be used in the Python CM API as follows:**
+
+```python
+r=cm.access({... , "configs_git_url":...})
+```
+
+
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_SUT_CONFIGS_PATH: `` +* CM_GIT_URL: `` + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-configs/_cm.json) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-configs/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-configs/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-configs/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-configs/_cm.json) + +___ +### Script output +`cmr "get mlperf inference sut configs sut-configs [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_HW_*` +* `CM_SUT_*` +#### New environment keys auto-detected from customize + +* `CM_HW_NAME` +* `CM_SUT_NAME` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-sut-description.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-sut-description.md new file mode 100644 index 0000000000..4f2f559a5e --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-mlperf-inference-sut-description.md @@ -0,0 +1,159 @@ +Automatically generated README for this automation recipe: **get-mlperf-inference-sut-description** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-inference-sut-description,e49a3f758b2d4e7b) ]* 
+ +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-description)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,mlperf,sut,description,system-under-test,system-description* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get mlperf sut description system-under-test system-description" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,mlperf,sut,description,system-under-test,system-description` + +`cm run script --tags=get,mlperf,sut,description,system-under-test,system-description [--input_flags]` + +*or* + +`cmr "get mlperf sut description system-under-test system-description"` + +`cmr "get mlperf sut description system-under-test system-description " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,mlperf,sut,description,system-under-test,system-description',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+   print (r['error'])
+
+```
+
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,mlperf,sut,description,system-under-test,system-description"``` + +#### Run this script via Docker (beta) + +`cm docker script "get mlperf sut description system-under-test system-description" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section.
+
+* `--name=value` → `CM_HW_NAME=value`
+* `--submitter=value` → `CM_MLPERF_SUBMITTER=value`
+
+**Above CLI flags can be used in the Python CM API as follows:**
+
+```python
+r=cm.access({... , "name":...})
+```
+
+
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_SUT_DESC_CACHE: `no` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-description/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,compiler + * CM names: `--adr.['compiler']...` + - CM script: [get-cl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cl) + - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) + - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) + * get,cuda-devices + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_DEVICE': ['gpu', 'cuda']}` + - CM script: [get-cuda-devices](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda-devices) + * detect,sudo + * Enable this dependency only if all ENV vars are set:
+`{'CM_DETERMINE_MEMORY_CONFIGURATION': ['yes'], 'CM_HOST_OS_TYPE': ['linux']}` + - CM script: [detect-sudo](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-sudo) + * get,generic-python-lib,_package.dmiparser + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-description/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-description/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-description/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-description/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-description/_cm.json) + +___ +### Script output +`cmr "get mlperf sut description system-under-test system-description " [--input_flags] -j` +#### New environment keys (filter) + +* `CM_HW_*` +* `CM_SUT_*` +#### New environment keys auto-detected from customize + +* `CM_HW_NAME` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-logging.md b/docs/MLPerf-benchmark-support/get-mlperf-logging.md new file mode 100644 index 0000000000..02dea12178 --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-mlperf-logging.md @@ -0,0 +1,127 @@ +Automatically generated README for this automation recipe: **get-mlperf-logging** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-logging,c9830dc6f87b4dc6) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-logging)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,mlperf,logging,mlperf-logging* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get mlperf logging mlperf-logging" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,mlperf,logging,mlperf-logging` + +`cm run script --tags=get,mlperf,logging,mlperf-logging ` + +*or* + +`cmr "get mlperf logging mlperf-logging"` + +`cmr "get mlperf logging mlperf-logging " ` + + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,mlperf,logging,mlperf-logging',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+   print (r['error'])
+
+```
+
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,mlperf,logging,mlperf-logging"``` + +#### Run this script via Docker (beta) + +`cm docker script "get mlperf logging mlperf-logging" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-logging/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,git,repo,_repo.https://github.com/mlcommons/logging + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-logging/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-logging/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-logging/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-logging/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-logging/_cm.json) + +___ +### Script output +`cmr "get mlperf logging mlperf-logging " -j` +#### New environment keys (filter) + +* `+PYTHONPATH` +* `CM_MLPERF_LOGGING_*` +#### New environment keys auto-detected from customize + +* `CM_MLPERF_LOGGING_SRC_PATH` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-power-dev.md b/docs/MLPerf-benchmark-support/get-mlperf-power-dev.md new file mode 100644 index 0000000000..0df4b325bd --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-mlperf-power-dev.md @@ -0,0 +1,171 @@ +Automatically generated README for this automation recipe: **get-mlperf-power-dev** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-power-dev,72aa56768c994bcf) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-power-dev)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,src,source,power,power-dev,mlperf,mlcommons* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get src source power power-dev mlperf mlcommons" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,src,source,power,power-dev,mlperf,mlcommons` + +`cm run script --tags=get,src,source,power,power-dev,mlperf,mlcommons[,variations] ` + +*or* + +`cmr "get src source power power-dev mlperf mlcommons"` + +`cmr "get src source power power-dev mlperf mlcommons [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,src,source,power,power-dev,mlperf,mlcommons',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+   print (r['error'])
+
+```
+
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,src,source,power,power-dev,mlperf,mlcommons"``` + +#### Run this script via Docker (beta) + +`cm docker script "get src source power power-dev mlperf mlcommons[variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**checkout**" +
+ Click here to expand this section. + + * `_branch.#` + - Environment variables: + - *CM_GIT_CHECKOUT*: `#` + - Workflow: + * `_sha.#` + - Environment variables: + - *CM_GIT_SHA*: `#` + - Workflow: + * `_tag.#` + - Environment variables: + - *CM_GIT_CHECKOUT_TAG*: `#` + - Workflow: + +
+ + + * Group "**repo**" +
+ Click here to expand this section. + + * **`_mlcommons`** (default) + - Environment variables: + - *CM_GIT_URL*: `https://github.com/mlcommons/power-dev.git` + - Workflow: + * `_octoml` + - Environment variables: + - *CM_GIT_URL*: `https://github.com/octoml/power-dev.git` + - Workflow: + * `_repo.#` + - Environment variables: + - *CM_GIT_URL*: `#` + - Workflow: + +
+ + +#### Default variations + +`_mlcommons` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_GIT_DEPTH: `--depth 1` +* CM_GIT_PATCH: `no` +* CM_GIT_CHECKOUT_FOLDER: `power-dev` + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-power-dev/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-power-dev/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-power-dev/_cm.json)*** + * get,git,repo + * CM names: `--adr.['mlperf-power-dev-git-repo']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-power-dev/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-power-dev/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-power-dev/_cm.json) + +___ +### Script output +`cmr "get src source power power-dev mlperf mlcommons [,variations]" -j` +#### New environment keys (filter) + +* `CM_MLPERF_POWER_SOURCE` +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/get-mlperf-tiny-eembc-energy-runner-src.md b/docs/MLPerf-benchmark-support/get-mlperf-tiny-eembc-energy-runner-src.md new file mode 100644 index 0000000000..f06d6d2aae --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-mlperf-tiny-eembc-energy-runner-src.md @@ -0,0 +1,129 @@ +Automatically generated README for this automation recipe: **get-mlperf-tiny-eembc-energy-runner-src** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online 
info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-tiny-eembc-energy-runner-src,c7da8d1ce4164a4b) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-eembc-energy-runner-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,src,source,eembc,energyrunner,energy-runner,eembc-energy-runner,tinymlperf-energy-runner* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get src source eembc energyrunner energy-runner eembc-energy-runner tinymlperf-energy-runner" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,src,source,eembc,energyrunner,energy-runner,eembc-energy-runner,tinymlperf-energy-runner` + +`cm run script --tags=get,src,source,eembc,energyrunner,energy-runner,eembc-energy-runner,tinymlperf-energy-runner ` + +*or* + +`cmr "get src source eembc energyrunner energy-runner eembc-energy-runner tinymlperf-energy-runner"` + +`cmr "get src source eembc energyrunner energy-runner eembc-energy-runner tinymlperf-energy-runner " ` + + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,src,source,eembc,energyrunner,energy-runner,eembc-energy-runner,tinymlperf-energy-runner',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+   print (r['error'])
+
+```
+
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,src,source,eembc,energyrunner,energy-runner,eembc-energy-runner,tinymlperf-energy-runner"``` + +#### Run this script via Docker (beta) + +`cm docker script "get src source eembc energyrunner energy-runner eembc-energy-runner tinymlperf-energy-runner" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_GIT_CHECKOUT: `main` +* CM_GIT_PATCH: `no` +* CM_GIT_RECURSE_SUBMODULES: `` +* CM_GIT_URL: `https://github.com/eembc/energyrunner` + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-eembc-energy-runner-src/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-eembc-energy-runner-src/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-eembc-energy-runner-src/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-eembc-energy-runner-src/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-eembc-energy-runner-src/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-eembc-energy-runner-src/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-eembc-energy-runner-src/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-eembc-energy-runner-src/_cm.json) + +___ +### Script output +`cmr "get src source eembc energyrunner energy-runner eembc-energy-runner tinymlperf-energy-runner " -j` +#### New environment keys (filter) + +* `+PYTHONPATH` +* `CM_EEMBC_ENERGY_RUNNER_*` +#### New environment keys auto-detected from customize + +* `CM_EEMBC_ENERGY_RUNNER_DATASETS` +* `CM_EEMBC_ENERGY_RUNNER_SESSIONS` +* `CM_EEMBC_ENERGY_RUNNER_SRC` +* `CM_EEMBC_ENERGY_RUNNER_SRC_DATASETS` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-tiny-src.md b/docs/MLPerf-benchmark-support/get-mlperf-tiny-src.md new file mode 100644 index 0000000000..7706dac158 --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-mlperf-tiny-src.md @@ -0,0 +1,143 @@ +Automatically generated README for this automation recipe: **get-mlperf-tiny-src** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-tiny-src,777843a0bb034524) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,src,source,tiny,tiny-src,tiny-source,tinymlperf,tinymlperf-src,mlperf,mlcommons* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get src source tiny tiny-src tiny-source tinymlperf tinymlperf-src mlperf mlcommons" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,src,source,tiny,tiny-src,tiny-source,tinymlperf,tinymlperf-src,mlperf,mlcommons` + +`cm run script --tags=get,src,source,tiny,tiny-src,tiny-source,tinymlperf,tinymlperf-src,mlperf,mlcommons ` + +*or* + +`cmr "get src source tiny tiny-src tiny-source tinymlperf tinymlperf-src mlperf mlcommons"` + +`cmr "get src source tiny tiny-src tiny-source tinymlperf tinymlperf-src mlperf mlcommons " ` + + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,src,source,tiny,tiny-src,tiny-source,tinymlperf,tinymlperf-src,mlperf,mlcommons',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+   print (r['error'])
+
+```
+
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,src,source,tiny,tiny-src,tiny-source,tinymlperf,tinymlperf-src,mlperf,mlcommons"``` + +#### Run this script via Docker (beta) + +`cm docker script "get src source tiny tiny-src tiny-source tinymlperf tinymlperf-src mlperf mlcommons" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_GIT_CHECKOUT: `master` +* CM_GIT_PATCH: `no` +* CM_GIT_RECURSE_SUBMODULES: `` +* CM_GIT_URL: `https://github.com/mlcommons/tiny.git` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-src/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-src/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-src/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-src/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-src/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-src/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-src/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-src/_cm.json) + +___ +### Script output +`cmr "get src source tiny tiny-src tiny-source tinymlperf tinymlperf-src mlperf mlcommons " -j` +#### New environment keys (filter) + +* `+PYTHONPATH` +* `CM_MLPERF_TINY_*` +#### New environment keys auto-detected from customize + +* `CM_MLPERF_TINY_BENCHMARK` +* `CM_MLPERF_TINY_DATASETS` +* `CM_MLPERF_TINY_DATASETS_AD` +* `CM_MLPERF_TINY_DATASETS_IC` +* `CM_MLPERF_TINY_DATASETS_KWS` +* `CM_MLPERF_TINY_DATASETS_KWS_OPEN` +* `CM_MLPERF_TINY_DATASETS_VWW` +* `CM_MLPERF_TINY_SRC` +* `CM_MLPERF_TINY_TRAINING` +* `CM_MLPERF_TINY_TRAINING_AD` +* `CM_MLPERF_TINY_TRAINING_IC` +* `CM_MLPERF_TINY_TRAINING_KWS` +* `CM_MLPERF_TINY_TRAINING_VWW` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-training-nvidia-code.md b/docs/MLPerf-benchmark-support/get-mlperf-training-nvidia-code.md new file mode 100644 index 0000000000..e29373502d --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-mlperf-training-nvidia-code.md @@ -0,0 +1,158 @@ +Automatically generated README for this automation recipe: **get-mlperf-training-nvidia-code** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-training-nvidia-code,fdc630b1d41743c5) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-nvidia-code)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): 
*get,nvidia,mlperf,training,code,training-code* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get nvidia mlperf training code training-code" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,nvidia,mlperf,training,code,training-code` + +`cm run script --tags=get,nvidia,mlperf,training,code,training-code[,variations] ` + +*or* + +`cmr "get nvidia mlperf training code training-code"` + +`cmr "get nvidia mlperf training code training-code [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,nvidia,mlperf,training,code,training-code',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+   print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,nvidia,mlperf,training,code,training-code"``` + +#### Run this script via Docker (beta) + +`cm docker script "get nvidia mlperf training code training-code[variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**repo-owner**" +
+ Click here to expand this section. + + * `_ctuning` + - Environment variables: + - *CM_TMP_TRAINING_SRC*: `ctuning` + - Workflow: + * `_custom` + - Workflow: + * **`_mlcommons`** (default) + - Environment variables: + - *CM_TMP_TRAINING_SRC*: `mlcommons` + - Workflow: + * `_nvidia-only` + - Environment variables: + - *CM_TMP_TRAINING_SRC*: `GATEOverflow` + - Workflow: + +
+ + +#### Default variations + +`_mlcommons` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+
+#### Versions
+Default version: `r3.0`
+
+* `r2.1`
+* `r3.0`
+* `r3.1`
+___
+### Dependencies on other CM scripts
+
+
+  1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-nvidia-code/_cm.json)***
+     * get,git,repo
+       * CM names: `--adr.['mlperf-training-results']...`
+       - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo)
+  1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-nvidia-code/customize.py)***
+  1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-nvidia-code/_cm.json)
+  1. ***Run native script if exists***
+  1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-nvidia-code/_cm.json)
+  1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-nvidia-code/customize.py)***
+  1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-nvidia-code/_cm.json) + +___ +### Script output +`cmr "get nvidia mlperf training code training-code [,variations]" -j` +#### New environment keys (filter) + +* `CM_MLPERF_TRAINING_NVIDIA_CODE_PATH` +#### New environment keys auto-detected from customize + +* `CM_MLPERF_TRAINING_NVIDIA_CODE_PATH` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-training-src.md b/docs/MLPerf-benchmark-support/get-mlperf-training-src.md new file mode 100644 index 0000000000..aaecc7818e --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-mlperf-training-src.md @@ -0,0 +1,224 @@ +Automatically generated README for this automation recipe: **get-mlperf-training-src** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-training-src,dc440bd88e794a28) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,src,source,training,training-src,training-source,mlperf,mlcommons* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get src source training training-src training-source mlperf mlcommons" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,src,source,training,training-src,training-source,mlperf,mlcommons` + +`cm run script --tags=get,src,source,training,training-src,training-source,mlperf,mlcommons[,variations] ` + +*or* + +`cmr "get src source training training-src training-source mlperf mlcommons"` + +`cmr "get src source training training-src training-source mlperf mlcommons [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,src,source,training,training-src,training-source,mlperf,mlcommons',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+   print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,src,source,training,training-src,training-source,mlperf,mlcommons"``` + +#### Run this script via Docker (beta) + +`cm docker script "get src source training training-src training-source mlperf mlcommons[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_no-recurse-submodules` + - Environment variables: + - *CM_GIT_RECURSE_SUBMODULES*: `` + - Workflow: + * `_nvidia-retinanet` + - Environment variables: + - *CM_GIT_PATCH_FILENAMES*: `nvidia-retinanet.patch,cpu_load.patch` + - Workflow: + * `_patch` + - Environment variables: + - *CM_GIT_PATCH*: `yes` + - Workflow: + +
+ + + * Group "**checkout**" +
+ Click here to expand this section. + + * `_branch.#` + - Environment variables: + - *CM_GIT_CHECKOUT*: `#` + - Workflow: + * `_sha.#` + - Environment variables: + - *CM_GIT_SHA*: `#` + - Workflow: + * `_tag.#` + - Environment variables: + - *CM_GIT_CHECKOUT_TAG*: `#` + - Workflow: + +
+ + + * Group "**git-history**" +
+ Click here to expand this section. + + * `_full-history` + - Environment variables: + - *CM_GIT_DEPTH*: `` + - Workflow: + * **`_short-history`** (default) + - Environment variables: + - *CM_GIT_DEPTH*: `--depth 5` + - Workflow: + +
+ + + * Group "**repo**" +
+ Click here to expand this section. + + * `_repo.#` + - Environment variables: + - *CM_GIT_URL*: `#` + - Workflow: + +
+ + + * Group "**src**" +
+ Click here to expand this section. + + * **`_cknowledge`** (default) + - Environment variables: + - *CM_GIT_URL*: `https://github.com/cknowledge/training.git` + - Workflow: + * `_mlcommons` + - Environment variables: + - *CM_GIT_URL*: `https://github.com/mlcommons/training.git` + - Workflow: + +
+ + +#### Default variations + +`_cknowledge,_short-history` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_GIT_CHECKOUT: `master` +* CM_GIT_DEPTH: `--depth 4` +* CM_GIT_PATCH: `no` +* CM_GIT_RECURSE_SUBMODULES: ` --recurse-submodules` +* CM_GIT_CHECKOUT_FOLDER: `training` + +
+
+#### Versions
+Default version: `master`
+
+* `custom`
+* `master`
+___
+### Dependencies on other CM scripts
+
+
+  1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-src/_cm.json)
+  1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-src/customize.py)***
+  1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-src/_cm.json)***
+     * get,git,repo
+       * CM names: `--adr.['mlperf-training-repo']...`
+       - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo)
+  1. ***Run native script if exists***
+  1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-src/_cm.json)
+  1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-src/customize.py)***
+  1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-src/_cm.json) + +___ +### Script output +`cmr "get src source training training-src training-source mlperf mlcommons [,variations]" -j` +#### New environment keys (filter) + +* `+PYTHONPATH` +* `CM_MLPERF_TRAINING_*` +* `CM_MLPERF_TRAINING_LAST_RELEASE` +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/get-nvidia-mitten.md b/docs/MLPerf-benchmark-support/get-nvidia-mitten.md new file mode 100644 index 0000000000..f6467da3f4 --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-nvidia-mitten.md @@ -0,0 +1,132 @@ +Automatically generated README for this automation recipe: **get-nvidia-mitten** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-nvidia-mitten,1c045f2902374de9) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-mitten)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,nvidia,mitten,nvidia-mitten* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get nvidia mitten nvidia-mitten" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,nvidia,mitten,nvidia-mitten` + +`cm run script --tags=get,nvidia,mitten,nvidia-mitten ` + +*or* + +`cmr "get nvidia mitten nvidia-mitten"` + +`cmr "get nvidia mitten nvidia-mitten " ` + + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,nvidia,mitten,nvidia-mitten',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+   print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,nvidia,mitten,nvidia-mitten"``` + +#### Run this script via Docker (beta) + +`cm docker script "get nvidia mitten nvidia-mitten" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `master` + +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-mitten/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_pycuda + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,git,_repo.https://github.com/NVIDIA/mitten + * CM names: `--adr.['nvidia-mitten-git-src']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-mitten/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-mitten/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-mitten/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-mitten/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-mitten/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-mitten/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-mitten/_cm.json) + +___ +### Script output +`cmr "get nvidia mitten nvidia-mitten " -j` +#### New environment keys (filter) + +* `CM_NVIDIA_MITTEN*` +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/get-spec-ptd.md b/docs/MLPerf-benchmark-support/get-spec-ptd.md new file mode 100644 index 0000000000..5c27972272 --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-spec-ptd.md @@ -0,0 +1,164 @@ +Automatically generated README for this automation recipe: **get-spec-ptd** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-spec-ptd,7423a878e4524136) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-spec-ptd)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,spec,ptd,ptdaemon,power,daemon,power-daemon,mlperf,mlcommons* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get spec ptd ptdaemon power daemon power-daemon mlperf mlcommons" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,spec,ptd,ptdaemon,power,daemon,power-daemon,mlperf,mlcommons` + +`cm run script --tags=get,spec,ptd,ptdaemon,power,daemon,power-daemon,mlperf,mlcommons [--input_flags]` + +*or* + +`cmr "get spec ptd ptdaemon power daemon power-daemon mlperf mlcommons"` + +`cmr "get spec ptd ptdaemon power daemon power-daemon mlperf mlcommons " [--input_flags]` + + + +#### Input Flags + +* --**input**=Path to SPEC PTDaemon (Optional) + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "input":...} +``` +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,spec,ptd,ptdaemon,power,daemon,power-daemon,mlperf,mlcommons',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+   print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,spec,ptd,ptdaemon,power,daemon,power-daemon,mlperf,mlcommons"``` + +#### Run this script via Docker (beta) + +`cm docker script "get spec ptd ptdaemon power daemon power-daemon mlperf mlcommons" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--input=value` → `CM_INPUT=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "input":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_GIT_CHECKOUT: `main` +* CM_GIT_DEPTH: `--depth 1` +* CM_GIT_PATCH: `no` +* CM_GIT_RECURSE_SUBMODULES: ` ` +* CM_GIT_URL: `https://github.com/mlcommons/power.git` + +
+ +#### Versions +Default version: `main` + +* `custom` +* `main` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-spec-ptd/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,git,repo,_repo.https://github.com/mlcommons/power + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-spec-ptd/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-spec-ptd/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-spec-ptd/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-spec-ptd/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-spec-ptd/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-spec-ptd/_cm.json) + +___ +### Script output +`cmr "get spec ptd ptdaemon power daemon power-daemon mlperf mlcommons " [--input_flags] -j` +#### New environment keys (filter) + +* `CM_MLPERF_PTD_PATH` +* `CM_SPEC_PTD_PATH` +#### New environment keys auto-detected from customize + +* `CM_MLPERF_PTD_PATH` +* `CM_SPEC_PTD_PATH` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/import-mlperf-inference-to-experiment.md b/docs/MLPerf-benchmark-support/import-mlperf-inference-to-experiment.md new file mode 100644 index 0000000000..f7708790e3 --- /dev/null +++ b/docs/MLPerf-benchmark-support/import-mlperf-inference-to-experiment.md @@ -0,0 +1,152 @@ +Automatically generated README for this automation recipe: **import-mlperf-inference-to-experiment** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Developers: [Grigori Fursin](https://cKnowledge.org/gfursin) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=import-mlperf-inference-to-experiment,72099fa962ea499c) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-inference-to-experiment)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *import,mlperf,inference,mlperf-inference,experiment,2experiment,to-experiment* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "import mlperf inference mlperf-inference experiment 2experiment to-experiment" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=import,mlperf,inference,mlperf-inference,experiment,2experiment,to-experiment` + +`cm run script --tags=import,mlperf,inference,mlperf-inference,experiment,2experiment,to-experiment[,variations] [--input_flags]` + +*or* + +`cmr "import mlperf inference mlperf-inference experiment 2experiment to-experiment"` + +`cmr "import mlperf inference mlperf-inference experiment 2experiment to-experiment [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'import,mlperf,inference,mlperf-inference,experiment,2experiment,to-experiment',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+   print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="import,mlperf,inference,mlperf-inference,experiment,2experiment,to-experiment"``` + +#### Run this script via Docker (beta) + +`cm docker script "import mlperf inference mlperf-inference experiment 2experiment to-experiment[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_skip_checker` + - Environment variables: + - *CM_SKIP_SUBMISSION_CHECKER*: `True` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--submitter=value` → `CM_MLPERF_SUBMITTER=value` +* `--target_repo=value` → `CM_IMPORT_MLPERF_INFERENCE_TARGET_REPO=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "submitter":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+
+___
+### Dependencies on other CM scripts
+
+
+  1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-inference-to-experiment/_cm.yaml)***
+     * detect,os
+       - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os)
+     * get,sys-utils-cm
+       - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm)
+  1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-inference-to-experiment/customize.py)***
+  1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-inference-to-experiment/_cm.yaml)
+  1. ***Run native script if exists***
+  1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-inference-to-experiment/_cm.yaml)
+  1. Run "postprocess" function from customize.py
+  1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-inference-to-experiment/_cm.yaml) + +___ +### Script output +`cmr "import mlperf inference mlperf-inference experiment 2experiment to-experiment [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/import-mlperf-tiny-to-experiment.md b/docs/MLPerf-benchmark-support/import-mlperf-tiny-to-experiment.md new file mode 100644 index 0000000000..99d55bc712 --- /dev/null +++ b/docs/MLPerf-benchmark-support/import-mlperf-tiny-to-experiment.md @@ -0,0 +1,135 @@ +Automatically generated README for this automation recipe: **import-mlperf-tiny-to-experiment** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Developers: [Grigori Fursin](https://cKnowledge.org/gfursin) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=import-mlperf-tiny-to-experiment,83e3efd7611f469b) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-tiny-to-experiment)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *import,mlperf,tiny,mlperf-tiny,experiment,2experiment,to-experiment* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "import mlperf tiny mlperf-tiny experiment 2experiment to-experiment" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=import,mlperf,tiny,mlperf-tiny,experiment,2experiment,to-experiment` + +`cm run script --tags=import,mlperf,tiny,mlperf-tiny,experiment,2experiment,to-experiment [--input_flags]` + +*or* + +`cmr "import mlperf tiny mlperf-tiny experiment 2experiment to-experiment"` + +`cmr "import mlperf tiny mlperf-tiny experiment 2experiment to-experiment " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'import,mlperf,tiny,mlperf-tiny,experiment,2experiment,to-experiment',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+   print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="import,mlperf,tiny,mlperf-tiny,experiment,2experiment,to-experiment"``` + +#### Run this script via Docker (beta) + +`cm docker script "import mlperf tiny mlperf-tiny experiment 2experiment to-experiment" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--target_repo=value` → `CM_IMPORT_TINYMLPERF_TARGET_REPO=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "target_repo":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-tiny-to-experiment/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-tiny-to-experiment/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-tiny-to-experiment/_cm.yaml) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-tiny-to-experiment/_cm.yaml) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-tiny-to-experiment/_cm.yaml) + +___ +### Script output +`cmr "import mlperf tiny mlperf-tiny experiment 2experiment to-experiment " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/import-mlperf-training-to-experiment.md b/docs/MLPerf-benchmark-support/import-mlperf-training-to-experiment.md new file mode 100644 index 0000000000..e5f76bed76 --- /dev/null +++ b/docs/MLPerf-benchmark-support/import-mlperf-training-to-experiment.md @@ -0,0 +1,141 @@ +Automatically generated README for this automation recipe: **import-mlperf-training-to-experiment** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Developers: [Grigori Fursin](https://cKnowledge.org/gfursin) + +--- +*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=import-mlperf-training-to-experiment,b13d9b7337414f17) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-training-to-experiment)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *import,mlperf,training,mlperf-training,experiment,2experiment,to-experiment* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "import mlperf training mlperf-training experiment 2experiment to-experiment" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=import,mlperf,training,mlperf-training,experiment,2experiment,to-experiment` + +`cm run script --tags=import,mlperf,training,mlperf-training,experiment,2experiment,to-experiment [--input_flags]` + +*or* + +`cmr "import mlperf training mlperf-training experiment 2experiment to-experiment"` + +`cmr "import mlperf training mlperf-training experiment 2experiment to-experiment " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'import,mlperf,training,mlperf-training,experiment,2experiment,to-experiment', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="import,mlperf,training,mlperf-training,experiment,2experiment,to-experiment"``` + +#### Run this script via Docker (beta) + +`cm docker script "import mlperf training mlperf-training experiment 2experiment to-experiment" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--target_repo=value` → `CM_IMPORT_MLPERF_TRAINING_TARGET_REPO=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "target_repo":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-training-to-experiment/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,mlperf,logging + - CM script: [get-mlperf-logging](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-logging) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-training-to-experiment/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-training-to-experiment/_cm.yaml) + 1. ***Run native script if exists*** + * [run_mlperf_logger.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-training-to-experiment/run_mlperf_logger.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-training-to-experiment/_cm.yaml) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-training-to-experiment/_cm.yaml) + +___ +### Script output +`cmr "import mlperf training mlperf-training experiment 2experiment to-experiment " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/install-mlperf-logging-from-src.md b/docs/MLPerf-benchmark-support/install-mlperf-logging-from-src.md new file mode 100644 index 0000000000..885a883f29 --- /dev/null +++ b/docs/MLPerf-benchmark-support/install-mlperf-logging-from-src.md @@ -0,0 +1,126 @@ +Automatically generated README for this automation recipe: **install-mlperf-logging-from-src** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-mlperf-logging-from-src,f67cb84a5dc942c3) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-mlperf-logging-from-src)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *install,mlperf,logging,from.src* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install mlperf logging from.src" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,mlperf,logging,from.src` + +`cm run script --tags=install,mlperf,logging,from.src ` + +*or* + +`cmr "install mlperf logging from.src"` + +`cmr "install mlperf logging from.src " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'install,mlperf,logging,from.src', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,mlperf,logging,from.src"``` + +#### Run this script via Docker (beta) + +`cm docker script "install mlperf logging from.src" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +* `master` +* `v3.1` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-mlperf-logging-from-src/_cm.yaml)*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,git,repo,_repo.https://github.com/mlcommons/logging + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-mlperf-logging-from-src/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-mlperf-logging-from-src/_cm.yaml) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-mlperf-logging-from-src/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-mlperf-logging-from-src/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-mlperf-logging-from-src/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-mlperf-logging-from-src/_cm.yaml) + +___ +### Script output +`cmr "install mlperf logging from.src " -j` +#### New environment keys (filter) + +* `CM_MLPERF_LOGGING_REPO_PATH` +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/prepare-training-data-bert.md b/docs/MLPerf-benchmark-support/prepare-training-data-bert.md new file mode 100644 index 0000000000..fc0386cbdd --- /dev/null +++ b/docs/MLPerf-benchmark-support/prepare-training-data-bert.md @@ -0,0 +1,193 @@ +Automatically generated README for this automation recipe: **prepare-training-data-bert** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=prepare-training-data-bert,1e06a7abe23545eb) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-bert)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *prepare,mlperf,training,data,input,bert* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "prepare mlperf training data input bert" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=prepare,mlperf,training,data,input,bert` + +`cm run script --tags=prepare,mlperf,training,data,input,bert[,variations] [--input_flags]` + +*or* + +`cmr "prepare mlperf training data input bert"` + +`cmr "prepare mlperf training data input bert [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'prepare,mlperf,training,data,input,bert', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="prepare,mlperf,training,data,input,bert"``` + +#### Run this script via Docker (beta) + +`cm docker script "prepare mlperf training data input bert[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * Group "**implementation**" +
+ Click here to expand this section. + + * **`_nvidia`** (default) + - Environment variables: + - *CM_TMP_VARIATION*: `nvidia` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,git,repo,_repo.https://github.com/wchen61/training_results_v2.1,_branch.fix_bert_prepare_data + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + * `_reference` + - Environment variables: + - *CM_TMP_VARIATION*: `reference` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,mlperf,training,src + * CM names: `--adr.['mlperf-training-src']...` + - CM script: [get-mlperf-training-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-training-src) + * get,python3 + * CM names: `--adr.['python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_tensorflow + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_protobuf + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + +
+ + +#### Default variations + +`_nvidia` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--clean=value` → `CM_MLPERF_TRAINING_CLEAN_TFRECORDS=value` +* `--data_dir=value` → `CM_DATA_DIR=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "clean":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-bert/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-bert/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-bert/_cm.json)*** + * download,file,_gdown,_url.https://drive.google.com/uc?id=1fbGClQMi2CoMv7fwrwTC5YYPooQBdcFW + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + * download,file,_gdown,_url.https://drive.google.com/uc?id=1USK108J6hMM_d27xCHi738qBL8_BT1u1 + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + * download,file,_gdown,_url.https://drive.google.com/uc?id=1tmMgLwoBvbEJEHXh77sqrXYw5RpqT8R_ + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + * download-and-extract,file,_gdown,_extract,_url.https://drive.google.com/uc?id=14xV2OUGSQDG_yDBrmbSdcDC-QGeqpfs_ + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + * download,file,_gdown,_url.https://drive.google.com/uc?id=1chiTBljF0Eh1U5pKs6ureVHgSbtU8OG_ + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + * download,file,_gdown,_url.https://drive.google.com/uc?id=1Q47V3K3jFRkbJ2zGCrKkKk-n0fvMZsa0 + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + * download,file,_gdown,_url.https://drive.google.com/uc?id=1vAcVmXSLsLeQ1q7gvHnQUSth5W_f_pwv + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + 1. 
***Run native script if exists*** + * [run-nvidia.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-bert/run-nvidia.sh) + * [run-reference.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-bert/run-reference.sh) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-bert/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-bert/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-bert/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-bert/_cm.json) + +___ +### Script output +`cmr "prepare mlperf training data input bert [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_MLPERF_TRAINING_BERT_*` +#### New environment keys auto-detected from customize + +* `CM_MLPERF_TRAINING_BERT_CONFIG_PATH` +* `CM_MLPERF_TRAINING_BERT_DATA_PATH` +* `CM_MLPERF_TRAINING_BERT_TFRECORDS_PATH` +* `CM_MLPERF_TRAINING_BERT_VOCAB_PATH` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/prepare-training-data-resnet.md b/docs/MLPerf-benchmark-support/prepare-training-data-resnet.md new file mode 100644 index 0000000000..1b2907e228 --- /dev/null +++ b/docs/MLPerf-benchmark-support/prepare-training-data-resnet.md @@ -0,0 +1,206 @@ +Automatically generated README for this automation recipe: **prepare-training-data-resnet** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=prepare-training-data-resnet,d42a8a8ca2704f9f) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-resnet)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *prepare,mlperf,training,data,input,resnet* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "prepare mlperf training data input resnet" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=prepare,mlperf,training,data,input,resnet` + +`cm run script --tags=prepare,mlperf,training,data,input,resnet[,variations] [--input_flags]` + +*or* + +`cmr "prepare mlperf training data input resnet"` + +`cmr "prepare mlperf training data input resnet [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'prepare,mlperf,training,data,input,resnet', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="prepare,mlperf,training,data,input,resnet"``` + +#### Run this script via Docker (beta) + +`cm docker script "prepare mlperf training data input resnet[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_mxnet.#` + - Environment variables: + - *CM_MXNET_VERSION*: `#` + - Workflow: + +
+ + + * Group "**implementation**" +
+ Click here to expand this section. + + * **`_nvidia`** (default) + - Environment variables: + - *CM_TMP_VARIATION*: `nvidia` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,mlperf,training,nvidia,code + * CM names: `--adr.['nvidia-training-code']...` + - CM script: [get-mlperf-training-nvidia-code](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-training-nvidia-code) + * get,git,repo,_repo.https://github.com/NVIDIA/DeepLearningExamples,_sha.81ee705868a11d6fe18c12d237abe4a08aab5fd6 + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + * `_reference` + - Environment variables: + - *CM_TMP_VARIATION*: `reference` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,mlperf,training,src + * CM names: `--adr.['mlperf-training-src']...` + - CM script: [get-mlperf-training-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-training-src) + * get,python3 + * CM names: `--adr.['python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_tensorflow + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_protobuf + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + +
+ + +#### Default variations + +`_nvidia` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--data_dir=value` → `CM_DATA_DIR=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "data_dir":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-resnet/_cm.json)*** + * get,dataset,imagenet,train + * CM names: `--adr.['imagenet-train']...` + - CM script: [get-dataset-imagenet-train](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-train) + * get,dataset,imagenet,val,original,_full + * CM names: `--adr.['imagenet-val']...` + - CM script: [get-dataset-imagenet-val](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-val) + * get,generic-sys-util,_rsync + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-resnet/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-resnet/_cm.json)*** + * download,file,_wget,_url.https://raw.githubusercontent.com/tensorflow/models/master/research/slim/datasets/imagenet_2012_validation_synset_labels.txt + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + * download,file,_wget,_url.https://raw.githubusercontent.com/tensorflow/tpu/master/tools/datasets/imagenet_to_gcs.py + * Enable this dependency only if all ENV vars are set:
+`{'CM_TMP_VARIATION': ['reference']}` + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + 1. ***Run native script if exists*** + * [run-nvidia.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-resnet/run-nvidia.sh) + * [run-reference.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-resnet/run-reference.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-resnet/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-resnet/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-resnet/_cm.json) + +___ +### Script output +`cmr "prepare mlperf training data input resnet [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_MLPERF_TRAINING_NVIDIA_RESNET_PREPROCESSED_PATH` +* `CM_MLPERF_TRAINING_RESNET_*` +#### New environment keys auto-detected from customize + +* `CM_MLPERF_TRAINING_NVIDIA_RESNET_PREPROCESSED_PATH` +* `CM_MLPERF_TRAINING_RESNET_DATA_PATH` +* `CM_MLPERF_TRAINING_RESNET_TFRECORDS_PATH` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/preprocess-mlperf-inference-submission.md b/docs/MLPerf-benchmark-support/preprocess-mlperf-inference-submission.md new file mode 100644 index 0000000000..20b71fd738 --- /dev/null +++ b/docs/MLPerf-benchmark-support/preprocess-mlperf-inference-submission.md @@ -0,0 +1,144 @@ +Automatically generated README for this automation recipe: **preprocess-mlperf-inference-submission** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info 
and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=preprocess-mlperf-inference-submission,c23068394a314266) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/preprocess-mlperf-inference-submission)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *run,mlc,mlcommons,mlperf,inference,submission,mlperf-inference,processor,preprocessor,preprocess* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "run mlc mlcommons mlperf inference submission mlperf-inference processor preprocessor preprocess" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=run,mlc,mlcommons,mlperf,inference,submission,mlperf-inference,processor,preprocessor,preprocess` + +`cm run script --tags=run,mlc,mlcommons,mlperf,inference,submission,mlperf-inference,processor,preprocessor,preprocess [--input_flags]` + +*or* + +`cmr "run mlc mlcommons mlperf inference submission mlperf-inference processor preprocessor preprocess"` + +`cmr "run mlc mlcommons mlperf inference submission mlperf-inference processor preprocessor preprocess " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'run,mlc,mlcommons,mlperf,inference,submission,mlperf-inference,processor,preprocessor,preprocess', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="run,mlc,mlcommons,mlperf,inference,submission,mlperf-inference,processor,preprocessor,preprocess"``` + +#### Run this script via Docker (beta) + +`cm docker script "run mlc mlcommons mlperf inference submission mlperf-inference processor preprocessor preprocess" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--submission_dir=value` → `CM_MLPERF_INFERENCE_SUBMISSION_DIR=value` +* `--submitter=value` → `CM_MLPERF_SUBMITTER=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "submission_dir":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/preprocess-mlperf-inference-submission/_cm.json)*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,mlcommons,inference,src + * CM names: `--adr.['inference-src', 'submission-checker-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,mlperf,submission,dir + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_MLPERF_INFERENCE_SUBMISSION_DIR': ['on']}` + * CM names: `--adr.['get-mlperf-submission-dir']...` + - CM script: [get-mlperf-inference-submission-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-submission-dir) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/preprocess-mlperf-inference-submission/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/preprocess-mlperf-inference-submission/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/preprocess-mlperf-inference-submission/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/preprocess-mlperf-inference-submission/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/preprocess-mlperf-inference-submission/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/preprocess-mlperf-inference-submission/_cm.json) + +___ +### Script output +`cmr "run mlc mlcommons mlperf inference submission mlperf-inference processor preprocessor preprocess " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/process-mlperf-accuracy.md b/docs/MLPerf-benchmark-support/process-mlperf-accuracy.md new file mode 100644 index 0000000000..47b3f0b5fa --- /dev/null +++ b/docs/MLPerf-benchmark-support/process-mlperf-accuracy.md @@ -0,0 +1,334 @@ +Automatically generated README for this automation recipe: **process-mlperf-accuracy** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=process-mlperf-accuracy,6e809013816b42ea) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-mlperf-accuracy)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *run,mlperf,mlcommons,accuracy,mlc,process,process-accuracy* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "run mlperf mlcommons accuracy mlc process process-accuracy" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=run,mlperf,mlcommons,accuracy,mlc,process,process-accuracy` + +`cm run script --tags=run,mlperf,mlcommons,accuracy,mlc,process,process-accuracy[,variations] [--input_flags]` + +*or* + +`cmr "run mlperf mlcommons accuracy mlc process process-accuracy"` + +`cmr "run mlperf mlcommons accuracy mlc process process-accuracy [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'run,mlperf,mlcommons,accuracy,mlc,process,process-accuracy', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="run,mlperf,mlcommons,accuracy,mlc,process,process-accuracy"``` + +#### Run this script via Docker (beta) + +`cm docker script "run mlperf mlcommons accuracy mlc process process-accuracy[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_default-pycocotools,openimages` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_pycocotools + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,mlcommons,mlperf,inference,src,-_openimages-nvidia-pycocotools + * CM names: `--adr.['for-pycocotools', 'accuracy-check-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * `_nvidia-pycocotools,openimages` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_nvidia-pycocotools + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,mlcommons,mlperf,inference,src,_openimages-nvidia-pycocotools + * CM names: `--adr.['for-pycocotools', 'accuracy-check-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + +
+ + + * Group "**coco-evaluation-tool**" +
+ Click here to expand this section. + + * **`_default-pycocotools`** (default) + - Workflow: + * `_nvidia-pycocotools` + - Workflow: + +
+ + + * Group "**dataset**" +
+ Click here to expand this section. + + * `_cnndm` + - Environment variables: + - *CM_DATASET*: `cnndm` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,dataset,cnndm,_validation + - CM script: [get-dataset-cnndm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-cnndm) + * get,generic-python-lib,_package.rouge_score + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.nltk + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.evaluate + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.absl-py + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.rouge_score + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_coco2014` + - Environment variables: + - *CM_DATASET*: `coco2014` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,dataset,coco2014,original + * CM names: `--adr.['coco2014-dataset', 'coco2014-original']...` + - CM script: [get-dataset-coco2014](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-coco2014) + * **`_imagenet`** (default) + - Environment variables: + - *CM_DATASET*: `imagenet` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,dataset-aux,image-classification,imagenet-aux + - CM script: [get-dataset-imagenet-aux](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-aux) + * get,generic-python-lib,_numpy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_kits19` + - Environment variables: + - *CM_DATASET*: `kits19` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,dataset,preprocessed,medical-imaging,kits19 + - CM script: [get-preprocessed-dataset-kits19](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-kits19) + * `_librispeech` + - Environment variables: + - *CM_DATASET*: `librispeech` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,dataset,preprocessed,speech-recognition,librispeech + - CM script: [get-preprocessed-dataset-librispeech](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-librispeech) + * `_open-orca` + - Environment variables: + - *CM_DATASET*: `openorca` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,dataset,openorca,preprocessed + * CM names: `--adr.['openorca-dataset']...` + - CM script: [get-preprocessed-dataset-openorca](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-openorca) + * get,ml-model,llama2 + * CM names: `--adr.['llama2-model']...` + - CM script: [get-ml-model-llama2](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-llama2) + * `_openimages` + - Environment variables: + - *CM_DATASET*: `openimages` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,dataset-aux,openimages,annotations + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_RUN_STYLE': ['valid']}` + - CM script: [get-dataset-openimages-annotations](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages-annotations) + * get,dataset,openimages,original + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_RUN_STYLE': ['valid']}` + * CM names: `--adr.['openimages-original']...` + - CM script: [get-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages) + * get,generic-python-lib,_package.kiwisolver + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_squad` + - Environment variables: + - *CM_DATASET*: `squad` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_boto3 + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.transformers + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,dataset,squad,language-processing + * Skip this dependency only if all ENV vars are set:
+`{'CM_DATASET_SQUAD_VAL_PATH': []}` + - CM script: [get-dataset-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad) + * get,dataset-aux,squad-vocab + * Skip this dependency only if all ENV vars are set:
+`{'CM_ML_MODEL_BERT_VOCAB_FILE_WITH_PATH': ['on']}` + - CM script: [get-dataset-squad-vocab](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad-vocab) + * get,generic-python-lib,_torch + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_tokenization + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_terabyte` + - Environment variables: + - *CM_DATASET*: `squad` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_ujson + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_scikit-learn + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_numpy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * `_float16` + - Environment variables: + - *CM_ACCURACY_DTYPE*: `float16` + - Workflow: + * **`_float32`** (default) + - Environment variables: + - *CM_ACCURACY_DTYPE*: `float32` + - Workflow: + * `_float64` + - Environment variables: + - *CM_ACCURACY_DTYPE*: `float64` + - Workflow: + * `_int16` + - Environment variables: + - *CM_ACCURACY_DTYPE*: `int16` + - Workflow: + * `_int32` + - Environment variables: + - *CM_ACCURACY_DTYPE*: `int32` + - Workflow: + * `_int64` + - Environment variables: + - *CM_ACCURACY_DTYPE*: `int64` + - Workflow: + * `_int8` + - Environment variables: + - *CM_ACCURACY_DTYPE*: `int8` + - Workflow: + +
+ + +#### Default variations + +`_default-pycocotools,_float32,_imagenet` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--result_dir=value` → `CM_MLPERF_ACCURACY_RESULTS_DIR=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "result_dir":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-mlperf-accuracy/_cm.json)*** + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,mlcommons,inference,src + * CM names: `--adr.['inference-src', 'accuracy-check-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-mlperf-accuracy/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-mlperf-accuracy/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-mlperf-accuracy/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-mlperf-accuracy/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-mlperf-accuracy/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-mlperf-accuracy/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-mlperf-accuracy/_cm.json) + +___ +### Script output +`cmr "run mlperf mlcommons accuracy mlc process process-accuracy [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/push-mlperf-inference-results-to-github.md b/docs/MLPerf-benchmark-support/push-mlperf-inference-results-to-github.md new file mode 100644 index 0000000000..ebca5beceb --- /dev/null +++ b/docs/MLPerf-benchmark-support/push-mlperf-inference-results-to-github.md @@ -0,0 +1,150 @@ +Automatically generated README for this automation recipe: **push-mlperf-inference-results-to-github** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=push-mlperf-inference-results-to-github,36c2ffd5df5d453a) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-mlperf-inference-results-to-github)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *push,mlperf,mlperf-inference-results,publish-results,inference,submission,github* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "push mlperf mlperf-inference-results publish-results inference submission github" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=push,mlperf,mlperf-inference-results,publish-results,inference,submission,github` + +`cm run script --tags=push,mlperf,mlperf-inference-results,publish-results,inference,submission,github [--input_flags]` + +*or* + +`cmr "push mlperf mlperf-inference-results publish-results inference submission github"` + +`cmr "push mlperf mlperf-inference-results publish-results inference submission github " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'push,mlperf,mlperf-inference-results,publish-results,inference,submission,github', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="push,mlperf,mlperf-inference-results,publish-results,inference,submission,github"``` + +#### Run this script via Docker (beta) + +`cm docker script "push mlperf mlperf-inference-results publish-results inference submission github" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--branch=value` → `CM_GIT_BRANCH=value` +* `--commit_message=value` → `CM_MLPERF_RESULTS_REPO_COMMIT_MESSAGE=value` +* `--repo_branch=value` → `CM_GIT_BRANCH=value` +* `--repo_url=value` → `CM_MLPERF_RESULTS_GIT_REPO_URL=value` +* `--submission_dir=value` → `CM_MLPERF_INFERENCE_SUBMISSION_DIR=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "branch":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_MLPERF_RESULTS_GIT_REPO_URL: `https://github.com/ctuning/mlperf_inference_submissions_v4.0` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-mlperf-inference-results-to-github/_cm.json)*** + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-sys-util,_rsync + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,mlperf,submission,dir + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_INFERENCE_SUBMISSION_DIR': ['on']}` + * CM names: `--adr.['get-mlperf-submission-dir']...` + - CM script: [get-mlperf-inference-submission-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-submission-dir) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-mlperf-inference-results-to-github/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-mlperf-inference-results-to-github/_cm.json)*** + * get,git,repo + * CM names: `--adr.['get-git-repo']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-mlperf-inference-results-to-github/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-mlperf-inference-results-to-github/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-mlperf-inference-results-to-github/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-mlperf-inference-results-to-github/_cm.json) + +___ +### Script output +`cmr "push mlperf mlperf-inference-results publish-results inference submission github " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/run-all-mlperf-models.md b/docs/MLPerf-benchmark-support/run-all-mlperf-models.md new file mode 100644 index 0000000000..01f5427b17 --- /dev/null +++ b/docs/MLPerf-benchmark-support/run-all-mlperf-models.md @@ -0,0 +1,237 @@ +
+Click here to see the table of contents. + +* [About](#about) +* [Summary](#summary) +* [Reuse this script in your project](#reuse-this-script-in-your-project) + * [ Install CM automation language](#install-cm-automation-language) + * [ Check CM script flags](#check-cm-script-flags) + * [ Run this script from command line](#run-this-script-from-command-line) + * [ Run this script from Python](#run-this-script-from-python) + * [ Run this script via GUI](#run-this-script-via-gui) + * [ Run this script via Docker (beta)](#run-this-script-via-docker-(beta)) +* [Customization](#customization) + * [ Variations](#variations) + * [ Default environment](#default-environment) +* [Script workflow, dependencies and native scripts](#script-workflow-dependencies-and-native-scripts) +* [Script output](#script-output) +* [New environment keys (filter)](#new-environment-keys-(filter)) +* [New environment keys auto-detected from customize](#new-environment-keys-auto-detected-from-customize) +* [Maintainers](#maintainers) + +
+ +*Note that this README is automatically generated - don't edit!* + +### About + +#### Summary + +* Category: *MLPerf benchmark support.* +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* CM "database" tags to find this script: *run,natively,all,mlperf-models* +* Output cached? *False* +___ +### Reuse this script in your project + +#### Install CM automation language + +* [Installation guide](https://github.com/mlcommons/ck/blob/master/docs/installation.md) +* [CM intro](https://doi.org/10.5281/zenodo.8105339) + +#### Pull CM repository with this automation + +```cm pull repo mlcommons@cm4mlops --checkout=dev``` + + +#### Run this script from command line + +1. `cm run script --tags=run,natively,all,mlperf-models[,variations] ` + +2. `cmr "run natively all mlperf-models[ variations]" ` + +* `variations` can be seen [here](#variations) + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'run,natively,all,mlperf-models', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="run,natively,all,mlperf-models"``` + +Use this [online GUI](https://cKnowledge.org/cm-gui/?tags=run,natively,all,mlperf-models) to generate CM CMD. + +#### Run this script via Docker (beta) + +`cm docker script "run natively all mlperf-models[ variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_phoenix,reference` + - Workflow: + +
+ + + * Group "**implementation**" +
+ Click here to expand this section. + + * `_deepsparse` + - Environment variables: + - *DIVISION*: `open` + - *IMPLEMENTATION*: `deepsparse` + - Workflow: + * `_intel` + - Environment variables: + - *IMPLEMENTATION*: `intel` + - Workflow: + * `_mil` + - Environment variables: + - *IMPLEMENTATION*: `mil` + - Workflow: + * `_nvidia` + - Environment variables: + - *IMPLEMENTATION*: `nvidia` + - Workflow: + * `_qualcomm` + - Environment variables: + - *IMPLEMENTATION*: `qualcomm` + - Workflow: + * `_reference` + - Environment variables: + - *IMPLEMENTATION*: `reference` + - Workflow: + * `_tflite-cpp` + - Environment variables: + - *IMPLEMENTATION*: `tflite_cpp` + - Workflow: + +
+ + + * Group "**power**" +
+ Click here to expand this section. + + * **`_performance-only`** (default) + - Workflow: + * `_power` + - Environment variables: + - *POWER*: `True` + - Workflow: + +
+ + + * Group "**sut**" +
+ Click here to expand this section. + + * `_macbookpro-m1` + - Environment variables: + - *CATEGORY*: `edge` + - *DIVISION*: `closed` + - Workflow: + * `_orin.32g` + - Environment variables: + - *CATEGORY*: `edge` + - *DIVISION*: `closed` + - Workflow: + * `_phoenix` + - Environment variables: + - *CATEGORY*: `edge,datacenter` + - *DIVISION*: `closed` + - Workflow: + * `_sapphire-rapids.24c` + - Environment variables: + - *CATEGORY*: `edge,datacenter` + - *DIVISION*: `closed` + - Workflow: + +
+ + +#### Default variations + +`_performance-only` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Script workflow, dependencies and native scripts + +
+Click here to expand this section. + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/_cm.yaml) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/_cm.yaml) + 1. ***Run native script if exists*** + * [run-bert-macos.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/run-bert-macos.sh) + * [run-bert.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/run-bert.sh) + * [run-cpp-implementation.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/run-cpp-implementation.sh) + * [run-mobilenet-models.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/run-mobilenet-models.sh) + * [run-nvidia-4090.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/run-nvidia-4090.sh) + * [run-nvidia-a100.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/run-nvidia-a100.sh) + * [run-nvidia-t4.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/run-nvidia-t4.sh) + * [run-pruned-bert.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/run-pruned-bert.sh) + * [run-reference-models.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/run-reference-models.sh) + * [run-resnet50-macos.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/run-resnet50-macos.sh) + * [run-resnet50.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/run-resnet50.sh) + 1. 
Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/_cm.yaml) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/_cm.yaml) +</details>
+ +___ +### Script output +`cmr "run natively all mlperf-models[,variations]" -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize + +___ +### Maintainers + +* [Open MLCommons taskforce on automation and reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/run-mlperf-inference-mobilenet-models.md b/docs/MLPerf-benchmark-support/run-mlperf-inference-mobilenet-models.md new file mode 100644 index 0000000000..a72c5e7985 --- /dev/null +++ b/docs/MLPerf-benchmark-support/run-mlperf-inference-mobilenet-models.md @@ -0,0 +1,383 @@ +Automatically generated README for this automation recipe: **run-mlperf-inference-mobilenet-models** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=run-mlperf-inference-mobilenet-models,f21cc993a8b14a58) ]* + +--- + +## Set up + +We need to get imagenet full dataset to make image-classification submissions for MLPerf inference. Since this dataset is not publicly available via a URL please follow the instructions given [here](https://github.com/mlcommons/ck/blob/master/cm-mlops/script/get-dataset-imagenet-val/README-extra.md) to download the dataset and register in CM. + +
+Click here to set up docker (Optional). + +### Docker Setup + +CM commands are expected to run natively but if you prefer not to modify the host system, you can do the below command to set up a docker container. + +``` +cm docker script --tags=run,mobilenet-models,_tflite,_accuracy-only \ +--adr.compiler.tags=gcc \ +--docker_cm_repo=mlcommons@cm4mlops \ +--imagenet_path=$HOME/imagenet-2012-val \ +--results_dir=$HOME/mobilenet_results \ +--submission_dir=$HOME/inference_submission_3.1 \ +--docker_skip_run_cmd +``` + +This command will build a docker container and give you an interactive shell from which you can execute the below CM run commands. +* `results_dir`, `submission_dir` and `imagenet_path` are mounted from the host system. +* `results_dir` and `submission_dir` are expected to be empty directories to be populated by the docker +* `imagenet_path` should point to the imagenet folder containing the 50000 validation images. + +
+ +## Run Commands + +Since the runs can take many hours, in case you are running remotely you can install screen as follows. You may omit "screen" from all commands if you are running on a host system. +``` +cmr "get generic-sys-util _screen" +``` +### Default tflite + + +#### Do a full accuracy run for all the models (can take almost a day) + +``` +screen cmr "run mobilenet-models _tflite _accuracy-only" \ +--adr.compiler.tags=gcc \ +--results_dir=$HOME/mobilenet_results +``` + +#### Do a full performance run for all the models (can take almost a day) +``` +screen cmr "run mobilenet-models _tflite _performance-only" \ +--adr.compiler.tags=gcc \ +--results_dir=$HOME/mobilenet_results +``` + +#### Generate README files for all the runs +``` +cmr "run mobilenet-models _tflite _populate-readme" \ +--adr.compiler.tags=gcc \ +--results_dir=$HOME/mobilenet_results +``` + +#### Generate actual submission tree + +We should use the master branch of MLCommons inference repo for the submission checker. You can use `--hw_note_extra` option to add your name to the notes. +``` +cmr "generate inference submission" \ +--results_dir=$HOME/mobilenet_results/valid_results \ +--submission_dir=$HOME/mobilenet_submission_tree \ +--clean \ +--infer_scenario_results=yes \ +--adr.compiler.tags=gcc --adr.inference-src.version=master \ +--run-checker \ +--submitter=cTuning \ +--hw_notes_extra="Result taken by NAME" +``` +* Use `--hw_name="My system name"` to give a meaningful system name. Examples can be seen [here](https://github.com/mlcommons/inference_results_v3.0/tree/main/open/cTuning/systems) + +#### Push the results to GitHub repo + +First, create a fork of [this repo](https://github.com/ctuning/mlperf_inference_submissions_v3.1/). Then run the following command after replacing `--repo_url` with your fork URL. 
+``` +cmr "push github mlperf inference submission" \ +--submission_dir=$HOME/mobilenet_submission_tree \ +--repo_url=https://github.com/ctuning/mlperf_inference_submissions_v3.1/ \ +--commit_message="Mobilenet results added" +``` + +Create a PR to [cTuning repo](https://github.com/ctuning/mlperf_inference_submissions_v3.1/) + +### Using ARMNN with NEON + +Follow the same procedure as above but for the first three experiment runs add `_armnn,_neon` to the tags. For example +``` +cmr "run mobilenet-models _tflite _armnn _neon _accuracy-only" \ +--adr.compiler.tags=gcc \ +--results_dir=$HOME/mobilenet_results +``` + +`results_dir` and `submission_dir` can be the same as before as results will be going to different subfolders. + +### Using ARMNN with OpenCL +Follow the same procedure as above but for the first three experiment runs add `_armnn,_opencl` to the tags. For example +``` +cmr "run mobilenet-models _tflite _armnn _opencl _accuracy-only" \ +--adr.compiler.tags=gcc \ +--results_dir=$HOME/mobilenet_results +``` + +`results_dir` and `submission_dir` can be the same as before as results will be going to different subfolders. + + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-mobilenet-models)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *run,mobilenet,models,image-classification,mobilenet-models,mlperf,inference* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "run mobilenet models image-classification mobilenet-models mlperf inference" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=run,mobilenet,models,image-classification,mobilenet-models,mlperf,inference` + +`cm run script --tags=run,mobilenet,models,image-classification,mobilenet-models,mlperf,inference[,variations] [--input_flags]` + +*or* + +`cmr "run mobilenet models image-classification mobilenet-models mlperf inference"` + +`cmr "run mobilenet models image-classification mobilenet-models mlperf inference [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'run,mobilenet,models,image-classification,mobilenet-models,mlperf,inference', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="run,mobilenet,models,image-classification,mobilenet-models,mlperf,inference"``` + +#### Run this script via Docker (beta) + +`cm docker script "run mobilenet models image-classification mobilenet-models mlperf inference[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_armnn` + - Environment variables: + - *CM_MLPERF_USE_ARMNN_LIBRARY*: `yes` + - Workflow: + * `_neon` + - Aliases: `_use-neon` + - Environment variables: + - *CM_MLPERF_USE_NEON*: `yes` + - Workflow: + * `_only-fp32` + - Environment variables: + - *CM_MLPERF_RUN_INT8*: `no` + - Workflow: + * `_only-int8` + - Environment variables: + - *CM_MLPERF_RUN_FP32*: `no` + - Workflow: + * `_opencl` + - Environment variables: + - *CM_MLPERF_USE_OPENCL*: `yes` + - Workflow: + * `_tflite,armnn` + - Environment variables: + - *CM_MLPERF_TFLITE_ARMNN*: `yes` + - Workflow: + * `_tflite,armnn,neon` + - Environment variables: + - *CM_MLPERF_TFLITE_ARMNN_NEON*: `yes` + - Workflow: + * `_tflite,armnn,opencl` + - Environment variables: + - *CM_MLPERF_TFLITE_ARMNN_OPENCL*: `yes` + - Workflow: + +
+ + + * Group "**base-framework**" +
+ Click here to expand this section. + + * **`_tflite`** (default) + - Workflow: + +
+ + + * Group "**model-selection**" +
+ Click here to expand this section. + + * **`_all-models`** (default) + - Environment variables: + - *CM_MLPERF_RUN_MOBILENETS*: `yes` + - *CM_MLPERF_RUN_EFFICIENTNETS*: `yes` + - Workflow: + * `_efficientnet` + - Environment variables: + - *CM_MLPERF_RUN_EFFICIENTNETS*: `yes` + - Workflow: + * `_mobilenet` + - Environment variables: + - *CM_MLPERF_RUN_MOBILENETS*: `yes` + - Workflow: + +
+ + + * Group "**optimization**" +
+ Click here to expand this section. + + * **`_tflite-default`** (default) + - Environment variables: + - *CM_MLPERF_TFLITE_DEFAULT_MODE*: `yes` + - Workflow: + +
+ + + * Group "**run-mode**" +
+ Click here to expand this section. + + * `_accuracy-only` + - Environment variables: + - *CM_MLPERF_FIND_PERFORMANCE_MODE*: `no` + - *CM_MLPERF_ACCURACY_MODE*: `yes` + - *CM_MLPERF_SUBMISSION_MODE*: `no` + - Workflow: + * `_find-performance` + - Environment variables: + - *CM_MLPERF_FIND_PERFORMANCE_MODE*: `yes` + - *CM_MLPERF_SUBMISSION_MODE*: `no` + - Workflow: + * `_performance-only` + - Environment variables: + - *CM_MLPERF_FIND_PERFORMANCE_MODE*: `no` + - *CM_MLPERF_PERFORMANCE_MODE*: `yes` + - *CM_MLPERF_SUBMISSION_MODE*: `no` + - Workflow: + * `_populate-readme` + - Environment variables: + - *CM_MLPERF_FIND_PERFORMANCE_MODE*: `no` + - *CM_MLPERF_POPULATE_README*: `yes` + - Workflow: + * `_submission` + - Environment variables: + - *CM_MLPERF_FIND_PERFORMANCE_MODE*: `no` + - *CM_MLPERF_SUBMISSION_MODE*: `yes` + - Workflow: + +
+ + +#### Default variations + +`_all-models,_tflite,_tflite-default` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--find-performance=value` → `CM_MLPERF_FIND_PERFORMANCE_MODE=value` +* `--imagenet_path=value` → `IMAGENET_PATH=value` +* `--no-rerun=value` → `CM_MLPERF_NO_RERUN=value` +* `--power=value` → `CM_MLPERF_POWER=value` +* `--results_dir=value` → `CM_MLPERF_INFERENCE_RESULTS_DIR=value` +* `--submission=value` → `CM_MLPERF_SUBMISSION_MODE=value` +* `--submission_dir=value` → `CM_MLPERF_INFERENCE_SUBMISSION_DIR=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "find-performance":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_MLPERF_RUN_MOBILENETS: `no` +* CM_MLPERF_RUN_EFFICIENTNETS: `no` +* CM_MLPERF_NO_RERUN: `no` +* CM_MLPERF_RUN_FP32: `yes` +* CM_MLPERF_RUN_INT8: `yes` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-mobilenet-models/_cm.json)*** + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-mobilenet-models/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-mobilenet-models/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-mobilenet-models/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-mobilenet-models/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-mobilenet-models/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-mobilenet-models/_cm.json) + +___ +### Script output +`cmr "run mobilenet models image-classification mobilenet-models mlperf inference [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/run-mlperf-inference-submission-checker.md b/docs/MLPerf-benchmark-support/run-mlperf-inference-submission-checker.md new file mode 100644 index 0000000000..a530b154e0 --- /dev/null +++ b/docs/MLPerf-benchmark-support/run-mlperf-inference-submission-checker.md @@ -0,0 +1,199 @@ +Automatically generated README for this automation recipe: **run-mlperf-inference-submission-checker** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=run-mlperf-inference-submission-checker,15d03ec2c1af4297) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-submission-checker)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *run,mlc,mlcommons,mlperf,inference,mlperf-inference,submission,checker,submission-checker,mlc-submission-checker* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "run mlc mlcommons mlperf inference mlperf-inference submission checker submission-checker mlc-submission-checker" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=run,mlc,mlcommons,mlperf,inference,mlperf-inference,submission,checker,submission-checker,mlc-submission-checker` + +`cm run script --tags=run,mlc,mlcommons,mlperf,inference,mlperf-inference,submission,checker,submission-checker,mlc-submission-checker[,variations] [--input_flags]` + +*or* + +`cmr "run mlc mlcommons mlperf inference mlperf-inference submission checker submission-checker mlc-submission-checker"` + +`cmr "run mlc mlcommons mlperf inference mlperf-inference submission checker submission-checker mlc-submission-checker [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'run,mlc,mlcommons,mlperf,inference,mlperf-inference,submission,checker,submission-checker,mlc-submission-checker', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="run,mlc,mlcommons,mlperf,inference,mlperf-inference,submission,checker,submission-checker,mlc-submission-checker"``` + +#### Run this script via Docker (beta) + +`cm docker script "run mlc mlcommons mlperf inference mlperf-inference submission checker submission-checker mlc-submission-checker[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_short-run` + - Environment variables: + - *CM_MLPERF_SHORT_RUN*: `yes` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--extra_args=value` → `CM_MLPERF_SUBMISSION_CHECKER_EXTRA_ARGS=value` +* `--extra_model_benchmark_map=value` → `CM_MLPERF_EXTRA_MODEL_MAPPING=value` +* `--input=value` → `CM_MLPERF_INFERENCE_SUBMISSION_DIR=value` +* `--power=value` → `CM_MLPERF_POWER=value` +* `--push_to_github=value` → `CM_MLPERF_RESULT_PUSH_TO_GITHUB=value` +* `--skip_compliance=value` → `CM_MLPERF_SKIP_COMPLIANCE=value` +* `--skip_power_check=value` → `CM_MLPERF_SKIP_POWER_CHECK=value` +* `--src_version=value` → `CM_MLPERF_SUBMISSION_CHECKER_VERSION=value` +* `--submission_dir=value` → `CM_MLPERF_INFERENCE_SUBMISSION_DIR=value` +* `--submitter=value` → `CM_MLPERF_SUBMITTER=value` +* `--tar=value` → `CM_TAR_SUBMISSION_DIR=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "extra_args":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_MLPERF_SHORT_RUN: `no` + +
+ +#### Versions +Default version: `master` + +* `master` +* `r3.0` +* `r3.1` +* `r4.0` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-submission-checker/_cm.json)*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,mlcommons,inference,src + * CM names: `--adr.['inference-src', 'submission-checker-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,generic-python-lib,_xlsxwriter + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.pyarrow + * CM names: `--adr.['pyarrow']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_pandas + * CM names: `--adr.['pandas']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,mlperf,submission,dir + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_MLPERF_INFERENCE_SUBMISSION_DIR': ['on']}` + * CM names: `--adr.['get-mlperf-submission-dir']...` + - CM script: [get-mlperf-inference-submission-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-submission-dir) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-submission-checker/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-submission-checker/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-submission-checker/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-submission-checker/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-submission-checker/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-submission-checker/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-submission-checker/_cm.json)*** + * publish-results,dashboard + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_DASHBOARD': ['on']}` + - CM script: [publish-results-to-dashboard](https://github.com/mlcommons/cm4mlops/tree/master/script/publish-results-to-dashboard) + * publish-results,github + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_RESULT_PUSH_TO_GITHUB': ['on']}` + * CM names: `--adr.['push-to-github']...` + - CM script: [push-mlperf-inference-results-to-github](https://github.com/mlcommons/cm4mlops/tree/master/script/push-mlperf-inference-results-to-github) + * run,tar + * Enable this dependency only if all ENV vars are set:
+`{'CM_TAR_SUBMISSION_DIR': ['yes']}` + - CM script: [tar-my-folder](https://github.com/mlcommons/cm4mlops/tree/master/script/tar-my-folder) + +___ +### Script output +`cmr "run mlc mlcommons mlperf inference mlperf-inference submission checker submission-checker mlc-submission-checker [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/run-mlperf-power-client.md b/docs/MLPerf-benchmark-support/run-mlperf-power-client.md new file mode 100644 index 0000000000..d0892f8421 --- /dev/null +++ b/docs/MLPerf-benchmark-support/run-mlperf-power-client.md @@ -0,0 +1,154 @@ +Automatically generated README for this automation recipe: **run-mlperf-power-client** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=run-mlperf-power-client,bf6a6d0cc97b48ae) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-client)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *run,mlc,mlcommons,mlperf,power,client,power-client* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "run mlc mlcommons mlperf power client power-client" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=run,mlc,mlcommons,mlperf,power,client,power-client` + +`cm run script --tags=run,mlc,mlcommons,mlperf,power,client,power-client [--input_flags]` + +*or* + +`cmr "run mlc mlcommons mlperf power client power-client"` + +`cmr "run mlc mlcommons mlperf power client power-client " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'run,mlc,mlcommons,mlperf,power,client,power-client', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="run,mlc,mlcommons,mlperf,power,client,power-client"``` + +#### Run this script via Docker (beta) + +`cm docker script "run mlc mlcommons mlperf power client power-client" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--loadgen_logs_dir=value` → `CM_MLPERF_LOADGEN_LOGS_DIR=value` +* `--log_dir=value` → `CM_MLPERF_POWER_LOG_DIR=value` +* `--max_amps=value` → `CM_MLPERF_POWER_MAX_AMPS=value` +* `--max_volts=value` → `CM_MLPERF_POWER_MAX_VOLTS=value` +* `--ntp_server=value` → `CM_MLPERF_POWER_NTP_SERVER=value` +* `--port=value` → `CM_MLPERF_POWER_SERVER_PORT=value` +* `--power_server=value` → `CM_MLPERF_POWER_SERVER_ADDRESS=value` +* `--run_cmd=value` → `CM_MLPERF_RUN_CMD=value` +* `--server=value` → `CM_MLPERF_POWER_SERVER_ADDRESS=value` +* `--server_port=value` → `CM_MLPERF_POWER_SERVER_PORT=value` +* `--timestamp=value` → `CM_MLPERF_POWER_TIMESTAMP=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "loadgen_logs_dir":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_MLPERF_POWER_LOG_DIR: `logs` +* CM_MLPERF_RUN_CMD: `` +* CM_MLPERF_POWER_SERVER_ADDRESS: `localhost` +* CM_MLPERF_POWER_NTP_SERVER: `time.google.com` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-client/_cm.json)*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,mlperf,power,src + * CM names: `--adr.['power-src']...` + - CM script: [get-mlperf-power-dev](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-power-dev) + * get,generic-sys-util,_ntpdate + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-client/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-client/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-client/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-client/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-client/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-client/_cm.json) + +___ +### Script output +`cmr "run mlc mlcommons mlperf power client power-client " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/run-mlperf-power-server.md b/docs/MLPerf-benchmark-support/run-mlperf-power-server.md new file mode 100644 index 0000000000..99e37b374b --- /dev/null +++ b/docs/MLPerf-benchmark-support/run-mlperf-power-server.md @@ -0,0 +1,165 @@ +Automatically generated README for this automation recipe: **run-mlperf-power-server** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=run-mlperf-power-server,5bc68aaf389a40bd) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-server)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *run,mlc,mlcommons,mlperf,power,server,power-server* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "run mlc mlcommons mlperf power server power-server" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=run,mlc,mlcommons,mlperf,power,server,power-server` + +`cm run script --tags=run,mlc,mlcommons,mlperf,power,server,power-server [--input_flags]` + +*or* + +`cmr "run mlc mlcommons mlperf power server power-server"` + +`cmr "run mlc mlcommons mlperf power server power-server " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'run,mlc,mlcommons,mlperf,power,server,power-server', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="run,mlc,mlcommons,mlperf,power,server,power-server"``` + +#### Run this script via Docker (beta) + +`cm docker script "run mlc mlcommons mlperf power server power-server" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--device_port=value` → `CM_MLPERF_POWER_DEVICE_PORT=value` +* `--device_type=value` → `CM_MLPERF_POWER_DEVICE_TYPE=value` +* `--interface_flag=value` → `CM_MLPERF_POWER_INTERFACE_FLAG=value` +* `--ntp_server=value` → `CM_MLPERF_POWER_NTP_SERVER=value` +* `--screen=value` → `CM_MLPERF_POWER_SERVER_USE_SCREEN=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "device_port":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_MLPERF_POWER_NTP_SERVER: `time.google.com` +* CM_MLPERF_POWER_INTERFACE_FLAG: `` +* CM_MLPERF_POWER_DEVICE_TYPE: `49` +* CM_MLPERF_POWER_SERVER_ADDRESS: `0.0.0.0` +* CM_MLPERF_POWER_SERVER_PORT: `4950` +* CM_MLPERF_POWER_DEVICE_PORT: `/dev/usbtmc0` +* CM_MLPERF_POWER_SERVER_USE_SCREEN: `no` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-server/_cm.json)*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,mlperf,power,src + * CM names: `--adr.['power-src']...` + - CM script: [get-mlperf-power-dev](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-power-dev) + * get,mlperf,power,daemon + * CM names: `--adr.['power-damenon']...` + - CM script: [get-spec-ptd](https://github.com/mlcommons/cm4mlops/tree/master/script/get-spec-ptd) + * get,generic,sys-util,_screen + * Skip this dependency only if all ENV vars are set:
+`{'CM_HOST_OS_TYPE': 'windows'}` + * CM names: `--adr.['screen']...` + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic-python-lib,_package.pypiwin32 + * Enable this dependency only if all ENV vars are set:
+`{'CM_HOST_OS_TYPE': 'windows'}` + * CM names: `--adr.['win32']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-server/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-server/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-server/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-server/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-server/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-server/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-server/_cm.json) + +___ +### Script output +`cmr "run mlc mlcommons mlperf power server power-server " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/run-mlperf-training-submission-checker.md b/docs/MLPerf-benchmark-support/run-mlperf-training-submission-checker.md new file mode 100644 index 0000000000..10f093c5f6 --- /dev/null +++ b/docs/MLPerf-benchmark-support/run-mlperf-training-submission-checker.md @@ -0,0 +1,181 @@ +Automatically generated README for this automation recipe: **run-mlperf-training-submission-checker** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=run-mlperf-training-submission-checker,cb5cb60ac9a74d09) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-training-submission-checker)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *run,mlc,mlcommons,mlperf,training,train,mlperf-training,submission,checker,submission-checker,mlc-submission-checker* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "run mlc mlcommons mlperf training train mlperf-training submission checker submission-checker mlc-submission-checker" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=run,mlc,mlcommons,mlperf,training,train,mlperf-training,submission,checker,submission-checker,mlc-submission-checker` + +`cm run script --tags=run,mlc,mlcommons,mlperf,training,train,mlperf-training,submission,checker,submission-checker,mlc-submission-checker[,variations] [--input_flags]` + +*or* + +`cmr "run mlc mlcommons mlperf training train mlperf-training submission checker submission-checker mlc-submission-checker"` + +`cmr "run mlc mlcommons mlperf training train mlperf-training submission checker submission-checker mlc-submission-checker [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'run,mlc,mlcommons,mlperf,training,train,mlperf-training,submission,checker,submission-checker,mlc-submission-checker', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="run,mlc,mlcommons,mlperf,training,train,mlperf-training,submission,checker,submission-checker,mlc-submission-checker"``` + +#### Run this script via Docker (beta) + +`cm docker script "run mlc mlcommons mlperf training train mlperf-training submission checker submission-checker mlc-submission-checker[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_short-run` + - Environment variables: + - *CM_MLPERF_SHORT_RUN*: `yes` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--extra_args=value` → `CM_MLPERF_SUBMISSION_CHECKER_EXTRA_ARGS=value` +* `--input=value` → `CM_MLPERF_SUBMISSION_DIR=value` +* `--power=value` → `CM_MLPERF_POWER=value` +* `--push_to_github=value` → `CM_MLPERF_RESULT_PUSH_TO_GITHUB=value` +* `--skip_compliance=value` → `CM_MLPERF_SKIP_COMPLIANCE=value` +* `--skip_power_check=value` → `CM_MLPERF_SKIP_POWER_CHECK=value` +* `--src_version=value` → `CM_MLPERF_SUBMISSION_CHECKER_VERSION=value` +* `--submission_dir=value` → `CM_MLPERF_SUBMISSION_DIR=value` +* `--submitter=value` → `CM_MLPERF_SUBMITTER=value` +* `--tar=value` → `CM_TAR_SUBMISSION_DIR=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "extra_args":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_MLPERF_SHORT_RUN: `no` + +
+ +#### Versions +Default version: `master` + +* `master` +* `r3.0` +* `r3.1` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-training-submission-checker/_cm.json)*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,mlcommons,inference,src + * CM names: `--adr.['inference-src', 'submission-checker-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * install,mlperf,logging,from.src + - CM script: [install-mlperf-logging-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-mlperf-logging-from-src) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-training-submission-checker/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-training-submission-checker/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-training-submission-checker/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-training-submission-checker/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-training-submission-checker/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-training-submission-checker/_cm.json)*** + * publish-results,github + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_RESULT_PUSH_TO_GITHUB': ['on']}` + * CM names: `--adr.['push-to-github']...` + - CM script: [push-mlperf-inference-results-to-github](https://github.com/mlcommons/cm4mlops/tree/master/script/push-mlperf-inference-results-to-github) + * run,tar + * Enable this dependency only if all ENV vars are set:
+`{'CM_TAR_SUBMISSION_DIR': ['yes']}` + - CM script: [tar-my-folder](https://github.com/mlcommons/cm4mlops/tree/master/script/tar-my-folder) + +___ +### Script output +`cmr "run mlc mlcommons mlperf training train mlperf-training submission checker submission-checker mlc-submission-checker [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/truncate-mlperf-inference-accuracy-log.md b/docs/MLPerf-benchmark-support/truncate-mlperf-inference-accuracy-log.md new file mode 100644 index 0000000000..bd14b1c25c --- /dev/null +++ b/docs/MLPerf-benchmark-support/truncate-mlperf-inference-accuracy-log.md @@ -0,0 +1,145 @@ +Automatically generated README for this automation recipe: **truncate-mlperf-inference-accuracy-log** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=truncate-mlperf-inference-accuracy-log,9d5ec20434084d14) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/truncate-mlperf-inference-accuracy-log)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *run,mlc,mlcommons,mlperf,inference,mlperf-inference,truncation,truncator,truncate,accuracy,accuracy-log,accuracy-log-trancation,accuracy-log-truncator,mlc-accuracy-log-trancation,mlc-accuracy-log-truncator* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "run mlc mlcommons mlperf inference mlperf-inference truncation truncator truncate accuracy accuracy-log accuracy-log-trancation accuracy-log-truncator mlc-accuracy-log-trancation mlc-accuracy-log-truncator" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=run,mlc,mlcommons,mlperf,inference,mlperf-inference,truncation,truncator,truncate,accuracy,accuracy-log,accuracy-log-trancation,accuracy-log-truncator,mlc-accuracy-log-trancation,mlc-accuracy-log-truncator` + +`cm run script --tags=run,mlc,mlcommons,mlperf,inference,mlperf-inference,truncation,truncator,truncate,accuracy,accuracy-log,accuracy-log-trancation,accuracy-log-truncator,mlc-accuracy-log-trancation,mlc-accuracy-log-truncator [--input_flags]` + +*or* + +`cmr "run mlc mlcommons mlperf inference mlperf-inference truncation truncator truncate accuracy accuracy-log accuracy-log-trancation accuracy-log-truncator mlc-accuracy-log-trancation mlc-accuracy-log-truncator"` + +`cmr "run mlc mlcommons mlperf inference mlperf-inference truncation truncator truncate accuracy accuracy-log accuracy-log-trancation accuracy-log-truncator mlc-accuracy-log-trancation mlc-accuracy-log-truncator " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'run,mlc,mlcommons,mlperf,inference,mlperf-inference,truncation,truncator,truncate,accuracy,accuracy-log,accuracy-log-trancation,accuracy-log-truncator,mlc-accuracy-log-trancation,mlc-accuracy-log-truncator', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="run,mlc,mlcommons,mlperf,inference,mlperf-inference,truncation,truncator,truncate,accuracy,accuracy-log,accuracy-log-trancation,accuracy-log-truncator,mlc-accuracy-log-trancation,mlc-accuracy-log-truncator"``` + +#### Run this script via Docker (beta) + +`cm docker script "run mlc mlcommons mlperf inference mlperf-inference truncation truncator truncate accuracy accuracy-log accuracy-log-trancation accuracy-log-truncator mlc-accuracy-log-trancation mlc-accuracy-log-truncator" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--input=value` → `CM_MLPERF_INFERENCE_SUBMISSION_DIR=value` +* `--submission_dir=value` → `CM_MLPERF_INFERENCE_SUBMISSION_DIR=value` +* `--submitter=value` → `CM_MLPERF_SUBMITTER=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "input":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/truncate-mlperf-inference-accuracy-log/_cm.json)*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,mlcommons,inference,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,mlperf,submission,dir + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_MLPERF_INFERENCE_SUBMISSION_DIR': ['on']}` + * CM names: `--adr.['get-mlperf-submission-dir']...` + - CM script: [get-mlperf-inference-submission-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-submission-dir) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/truncate-mlperf-inference-accuracy-log/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/truncate-mlperf-inference-accuracy-log/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/truncate-mlperf-inference-accuracy-log/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/truncate-mlperf-inference-accuracy-log/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/truncate-mlperf-inference-accuracy-log/_cm.json) + +___ +### Script output +`cmr "run mlc mlcommons mlperf inference mlperf-inference truncation truncator truncate accuracy accuracy-log accuracy-log-trancation accuracy-log-truncator mlc-accuracy-log-trancation mlc-accuracy-log-truncator " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Modular-AI-ML-application-pipeline/app-image-classification-onnx-py.md b/docs/Modular-AI-ML-application-pipeline/app-image-classification-onnx-py.md new file mode 100644 index 0000000000..eb4e16f3d2 --- /dev/null +++ b/docs/Modular-AI-ML-application-pipeline/app-image-classification-onnx-py.md @@ -0,0 +1,213 @@ +Automatically generated README for this automation recipe: **app-image-classification-onnx-py** + +Category: **Modular AI/ML application pipeline** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task 
Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-image-classification-onnx-py,3d5e908e472b417e) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-onnx-py)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *modular,python,app,image-classification,onnx* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "modular python app image-classification onnx" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=modular,python,app,image-classification,onnx` + +`cm run script --tags=modular,python,app,image-classification,onnx[,variations] [--input_flags]` + +*or* + +`cmr "modular python app image-classification onnx"` + +`cmr "modular python app image-classification onnx [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* 
+ + +#### Input Flags + +* --**input**=Path to JPEG image to classify +* --**output**=Output directory (optional) +* --**j**=Print JSON output + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "input":...} +``` +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'modular,python,app,image-classification,onnx', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="modular,python,app,image-classification,onnx"``` + +#### Run this script via Docker (beta) + +`cm docker script "modular python app image-classification onnx[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * Group "**target**" +
+ Click here to expand this section. + + * **`_cpu`** (default) + - Environment variables: + - *USE_CPU*: `True` + - Workflow: + * `_cuda` + - Environment variables: + - *USE_CUDA*: `True` + - Workflow: + +
+ + +#### Default variations + +`_cpu` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--input=value` → `CM_IMAGE=value` +* `--output=value` → `CM_APP_IMAGE_CLASSIFICATION_ONNX_PY_OUTPUT=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "input":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_BATCH_COUNT: `1` +* CM_BATCH_SIZE: `1` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-onnx-py/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,cuda + * Enable this dependency only if all ENV vars are set:
+`{'USE_CUDA': [True]}` + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * get,cudnn + * Enable this dependency only if all ENV vars are set:
+`{'USE_CUDA': [True]}` + * CM names: `--adr.['cudnn']...` + - CM script: [get-cudnn](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cudnn) + * get,dataset,imagenet,image-classification,original,_run-during-docker-build + - CM script: [get-dataset-imagenet-val](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-val) + * get,dataset-aux,imagenet-aux,image-classification + - CM script: [get-dataset-imagenet-aux](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-aux) + * get,ml-model,resnet50,_onnx,image-classification + * CM names: `--adr.['ml-model']...` + - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) + * get,generic-python-lib,_package.Pillow + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.numpy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.opencv-python + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_onnxruntime + * Skip this dependenecy only if all ENV vars are set:
+`{'USE_CUDA': [True]}` + * CM names: `--adr.['onnxruntime']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_onnxruntime_gpu + * Enable this dependency only if all ENV vars are set:
+`{'USE_CUDA': [True]}` + * CM names: `--adr.['onnxruntime']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-onnx-py/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-onnx-py/_cm.yaml) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-onnx-py/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-onnx-py/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-onnx-py/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-onnx-py/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-onnx-py/_cm.yaml) + +___ +### Script output +`cmr "modular python app image-classification onnx [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_APP_IMAGE_CLASSIFICATION_ONNX_PY*` +#### New environment keys auto-detected from customize diff --git a/docs/Modular-AI-ML-application-pipeline/app-image-classification-tf-onnx-cpp.md b/docs/Modular-AI-ML-application-pipeline/app-image-classification-tf-onnx-cpp.md new file mode 100644 index 0000000000..4609e0f99f --- /dev/null +++ b/docs/Modular-AI-ML-application-pipeline/app-image-classification-tf-onnx-cpp.md @@ -0,0 +1,133 @@ +Automatically generated README for this automation recipe: **app-image-classification-tf-onnx-cpp** + +Category: **Modular AI/ML application pipeline** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-image-classification-tf-onnx-cpp,879ed32e47074033) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tf-onnx-cpp)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *app,image-classification,cpp,tensorflow,onnx* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "app image-classification cpp tensorflow onnx" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=app,image-classification,cpp,tensorflow,onnx` + +`cm run script --tags=app,image-classification,cpp,tensorflow,onnx ` + +*or* + +`cmr "app image-classification cpp tensorflow onnx"` + +`cmr "app image-classification cpp tensorflow onnx " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'app,image-classification,cpp,tensorflow,onnx', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="app,image-classification,cpp,tensorflow,onnx"``` + +#### Run this script via Docker (beta) + +`cm docker script "app image-classification cpp tensorflow onnx" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_BATCH_COUNT: `1` +* CM_BATCH_SIZE: `1` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tf-onnx-cpp/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,gcc + - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) + * get,dataset,image-classification,original + - CM script: [get-dataset-imagenet-val](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-val) + * get,dataset-aux,image-classification + - CM script: [get-dataset-imagenet-aux](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-aux) + * get,ml-model,raw,image-classification,resnet50,_onnx,_opset-11 + - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) + * tensorflow,from-src + - CM script: [install-tensorflow-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-tensorflow-from-src) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tf-onnx-cpp/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tf-onnx-cpp/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tf-onnx-cpp/_cm.json) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tf-onnx-cpp/_cm.json) + +___ +### Script output +`cmr "app image-classification cpp tensorflow onnx " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Modular-AI-ML-application-pipeline/app-image-classification-torch-py.md b/docs/Modular-AI-ML-application-pipeline/app-image-classification-torch-py.md new file mode 100644 index 0000000000..2f24137374 --- /dev/null +++ b/docs/Modular-AI-ML-application-pipeline/app-image-classification-torch-py.md @@ -0,0 +1,170 @@ +Automatically generated README for this automation recipe: **app-image-classification-torch-py** + +Category: **Modular AI/ML application pipeline** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-image-classification-torch-py,e3986ae887b84ca8) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-torch-py)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *app,image-classification,python,torch* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "app image-classification python torch" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=app,image-classification,python,torch` + +`cm run script --tags=app,image-classification,python,torch[,variations] ` + +*or* + +`cmr "app image-classification python torch"` + +`cmr "app image-classification python torch [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'app,image-classification,python,torch', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="app,image-classification,python,torch"``` + +#### Run this script via Docker (beta) + +`cm docker script "app image-classification python torch[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_cuda` + - Environment variables: + - *USE_CUDA*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,cuda + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_BATCH_COUNT: `1` +* CM_BATCH_SIZE: `1` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-torch-py/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,dataset,imagenet,image-classification,preprocessed + - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) + * get,dataset-aux,imagenet-aux,image-classification + - CM script: [get-dataset-imagenet-aux](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-aux) + * get,imagenet-helper + - CM script: [get-dataset-imagenet-helper](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-helper) + * get,ml-model,image-classification,resnet50,_pytorch,_fp32 + - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) + * get,generic-python-lib,_torch + * Skip this dependenecy only if all ENV vars are set:
+`{'USE_CUDA': ['yes']}` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torch_cuda + * Enable this dependency only if all ENV vars are set:
+`{'USE_CUDA': ['yes']}` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torchvision + * Skip this dependency only if all ENV vars are set:
+`{'USE_CUDA': ['yes']}` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torchvision_cuda + * Enable this dependency only if all ENV vars are set:
+`{'USE_CUDA': ['yes']}` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-torch-py/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-torch-py/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-torch-py/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-torch-py/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-torch-py/_cm.json) + +___ +### Script output +`cmr "app image-classification python torch [,variations]" -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Modular-AI-ML-application-pipeline/app-image-classification-tvm-onnx-py.md b/docs/Modular-AI-ML-application-pipeline/app-image-classification-tvm-onnx-py.md new file mode 100644 index 0000000000..c94a3505fa --- /dev/null +++ b/docs/Modular-AI-ML-application-pipeline/app-image-classification-tvm-onnx-py.md @@ -0,0 +1,158 @@ +Automatically generated README for this automation recipe: **app-image-classification-tvm-onnx-py** + +Category: **Modular AI/ML application pipeline** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-image-classification-tvm-onnx-py,63080407db4d4ac4) ] [ [Notes 
from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tvm-onnx-py)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *app,image-classification,python,tvm-onnx* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "app image-classification python tvm-onnx" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=app,image-classification,python,tvm-onnx` + +`cm run script --tags=app,image-classification,python,tvm-onnx[,variations] ` + +*or* + +`cmr "app image-classification python tvm-onnx"` + +`cmr "app image-classification python tvm-onnx [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'app,image-classification,python,tvm-onnx',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="app,image-classification,python,tvm-onnx"``` + +#### Run this script via Docker (beta) + +`cm docker script "app image-classification python tvm-onnx[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_cuda` + - Environment variables: + - *USE_CUDA*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,cuda + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * `_llvm` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_BATCH_COUNT: `1` +* CM_BATCH_SIZE: `1` + +
+
+___
+### Dependencies on other CM scripts
+
+
+  1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tvm-onnx-py/_cm.json)***
+     * detect,os
+       - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os)
+     * detect,cpu
+       - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu)
+     * get,python3
+       * CM names: `--adr.['python', 'python3']...`
+       - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3)
+     * get,dataset,image-classification,original
+       - CM script: [get-dataset-imagenet-val](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-val)
+     * get,dataset-aux,image-classification
+       - CM script: [get-dataset-imagenet-aux](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-aux)
+     * get,raw,ml-model,image-classification,resnet50,_onnx
+       - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50)
+     * get,generic-python-lib,_onnxruntime
+       - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib)
+     * get,tvm
+       * CM names: `--adr.['tvm']...`
+       - CM script: [get-tvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tvm)
+  1. Run "preprocess" function from customize.py
+  1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tvm-onnx-py/_cm.json)
+  1. ***Run native script if exists***
+     * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tvm-onnx-py/run.sh)
+  1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tvm-onnx-py/_cm.json)
+  1. Run "postprocess" function from customize.py
+  1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tvm-onnx-py/_cm.json) + +___ +### Script output +`cmr "app image-classification python tvm-onnx [,variations]" -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Modular-AI-ML-application-pipeline/app-stable-diffusion-onnx-py.md b/docs/Modular-AI-ML-application-pipeline/app-stable-diffusion-onnx-py.md new file mode 100644 index 0000000000..14858f184e --- /dev/null +++ b/docs/Modular-AI-ML-application-pipeline/app-stable-diffusion-onnx-py.md @@ -0,0 +1,203 @@ +Automatically generated README for this automation recipe: **app-stable-diffusion-onnx-py** + +Category: **Modular AI/ML application pipeline** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-stable-diffusion-onnx-py,4d33981ac3534b3b) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-stable-diffusion-onnx-py)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *modular,python,app,stable-diffusion,onnx* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "modular python app stable-diffusion onnx" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=modular,python,app,stable-diffusion,onnx` + +`cm run script --tags=modular,python,app,stable-diffusion,onnx[,variations] [--input_flags]` + +*or* + +`cmr "modular python app stable-diffusion onnx"` + +`cmr "modular python app stable-diffusion onnx [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + + +#### Input Flags + +* --**text**=Text to generate image +* --**output**=Output directory + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "text":...} +``` +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'modular,python,app,stable-diffusion,onnx',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="modular,python,app,stable-diffusion,onnx"``` + +#### Run this script via Docker (beta) + +`cm docker script "modular python app stable-diffusion onnx[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * Group "**target**" +
+ Click here to expand this section. + + * **`_cpu`** (default) + - Environment variables: + - *USE_CPU*: `True` + - *CM_DEVICE*: `cpu` + - Workflow: + * `_cuda` + - Environment variables: + - *USE_CUDA*: `True` + - *CM_DEVICE*: `cuda:0` + - Workflow: + +
+ + +#### Default variations + +`_cpu` + +#### Script flags mapped to environment +
+Click here to expand this section.
+
+* `--output=value`  →  `CM_APP_STABLE_DIFFUSION_ONNX_PY_OUTPUT=value`
+* `--text=value`  →  `CM_APP_STABLE_DIFFUSION_ONNX_PY_TEXT=value`
+
+**Above CLI flags can be used in the Python CM API as follows:**
+
+```python
+r=cm.access({... , "output":...})
+```
+
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-stable-diffusion-onnx-py/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,cuda + * Enable this dependency only if all ENV vars are set:
+`{'USE_CUDA': [True]}` + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * get,cudnn + * Enable this dependency only if all ENV vars are set:
+`{'USE_CUDA': [True]}`
+       * CM names: `--adr.['cudnn']...`
+       - CM script: [get-cudnn](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cudnn)
+     * get,generic-python-lib,_package.optimum[onnxruntime]
+       * Skip this dependency only if all ENV vars are set:<br>
+`{'USE_CUDA': [True]}` + * CM names: `--adr.['optimum']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.optimum[onnxruntime-gpu] + * Enable this dependency only if all ENV vars are set:
+`{'USE_CUDA': [True]}`
+       * CM names: `--adr.['optimum']...`
+       - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib)
+     * get,generic-python-lib,_package.diffusers
+       * CM names: `--adr.['diffusers']...`
+       - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib)
+     * get,ml-model,huggingface,zoo,_model-stub.runwayml/stable-diffusion-v1-5
+       - CM script: [get-ml-model-huggingface-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-huggingface-zoo)
+  1. Run "preprocess" function from customize.py
+  1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-stable-diffusion-onnx-py/_cm.yaml)
+  1. ***Run native script if exists***
+     * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-stable-diffusion-onnx-py/run.bat)
+     * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-stable-diffusion-onnx-py/run.sh)
+  1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-stable-diffusion-onnx-py/_cm.yaml)
+  1. Run "postprocess" function from customize.py
+  1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-stable-diffusion-onnx-py/_cm.yaml) + +___ +### Script output +`cmr "modular python app stable-diffusion onnx [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Modular-MLPerf-benchmarks/app-mlperf-inference-dummy.md b/docs/Modular-MLPerf-benchmarks/app-mlperf-inference-dummy.md new file mode 100644 index 0000000000..64b91c4e3a --- /dev/null +++ b/docs/Modular-MLPerf-benchmarks/app-mlperf-inference-dummy.md @@ -0,0 +1,360 @@ +Automatically generated README for this automation recipe: **app-mlperf-inference-dummy** + +Category: **Modular MLPerf benchmarks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-mlperf-inference-dummy,5b71627383a94576) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-dummy)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *reproduce,mlcommons,mlperf,inference,harness,dummy-harness,dummy* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "reproduce mlcommons mlperf inference harness dummy-harness dummy" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=reproduce,mlcommons,mlperf,inference,harness,dummy-harness,dummy` + +`cm run script --tags=reproduce,mlcommons,mlperf,inference,harness,dummy-harness,dummy[,variations] [--input_flags]` + +*or* + +`cmr "reproduce mlcommons mlperf inference harness dummy-harness dummy"` + +`cmr "reproduce mlcommons mlperf inference harness dummy-harness dummy [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'reproduce,mlcommons,mlperf,inference,harness,dummy-harness,dummy',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="reproduce,mlcommons,mlperf,inference,harness,dummy-harness,dummy"``` + +#### Run this script via Docker (beta) + +`cm docker script "reproduce mlcommons mlperf inference harness dummy-harness dummy[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *Internal group (variations should not be selected manually)* +
+ Click here to expand this section. + + * `_bert_` + - Workflow: + * `_gptj_` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,ml-model,gptj + * CM names: `--adr.['gptj-model']...` + - CM script: [get-ml-model-gptj](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-gptj) + * get,dataset,cnndm,_validation + - CM script: [get-dataset-cnndm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-cnndm) + * `_llama2-70b_` + - Workflow: + +
+ + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_pytorch,cpu` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_torch + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_pytorch,cuda` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_torch_cuda + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_singlestream,resnet50` + - Workflow: + * `_singlestream,retinanet` + - Workflow: + +
+ + + * Group "**backend**" +
+ Click here to expand this section. + + * **`_pytorch`** (default) + - Environment variables: + - *CM_MLPERF_BACKEND*: `pytorch` + - Workflow: + +
+ + + * Group "**batch-size**" +
+ Click here to expand this section. + + * `_bs.#` + - Workflow: + +
+ + + * Group "**device**" +
+ Click here to expand this section. + + * **`_cpu`** (default) + - Environment variables: + - *CM_MLPERF_DEVICE*: `cpu` + - Workflow: + * `_cuda` + - Environment variables: + - *CM_MLPERF_DEVICE*: `gpu` + - *CM_MLPERF_DEVICE_LIB_NAMESPEC*: `cudart` + - Workflow: + +
+ + + * Group "**loadgen-scenario**" +
+ Click here to expand this section. + + * `_multistream` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `MultiStream` + - Workflow: + * `_offline` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `Offline` + - Workflow: + * `_server` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `Server` + - Workflow: + * `_singlestream` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `SingleStream` + - Workflow: + +
+ + + * Group "**model**" +
+ Click here to expand this section. + + * `_bert-99` + - Environment variables: + - *CM_MODEL*: `bert-99` + - *CM_SQUAD_ACCURACY_DTYPE*: `float32` + - Workflow: + * `_bert-99.9` + - Environment variables: + - *CM_MODEL*: `bert-99.9` + - Workflow: + * `_gptj-99` + - Environment variables: + - *CM_MODEL*: `gptj-99` + - *CM_SQUAD_ACCURACY_DTYPE*: `float32` + - Workflow: + * `_gptj-99.9` + - Environment variables: + - *CM_MODEL*: `gptj-99.9` + - Workflow: + * `_llama2-70b-99` + - Environment variables: + - *CM_MODEL*: `llama2-70b-99` + - Workflow: + * `_llama2-70b-99.9` + - Environment variables: + - *CM_MODEL*: `llama2-70b-99.9` + - Workflow: + * **`_resnet50`** (default) + - Environment variables: + - *CM_MODEL*: `resnet50` + - Workflow: + * `_retinanet` + - Environment variables: + - *CM_MODEL*: `retinanet` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * `_fp16` + - Workflow: + * `_fp32` + - Workflow: + * `_uint8` + - Workflow: + +
+ + +#### Default variations + +`_cpu,_pytorch,_resnet50` + +#### Script flags mapped to environment +
+Click here to expand this section.
+
+* `--count=value`  →  `CM_MLPERF_LOADGEN_QUERY_COUNT=value`
+* `--max_batchsize=value`  →  `CM_MLPERF_LOADGEN_MAX_BATCHSIZE=value`
+* `--mlperf_conf=value`  →  `CM_MLPERF_CONF=value`
+* `--mode=value`  →  `CM_MLPERF_LOADGEN_MODE=value`
+* `--multistream_target_latency=value`  →  `CM_MLPERF_LOADGEN_MULTISTREAM_TARGET_LATENCY=value`
+* `--offline_target_qps=value`  →  `CM_MLPERF_LOADGEN_OFFLINE_TARGET_QPS=value`
+* `--output_dir=value`  →  `CM_MLPERF_OUTPUT_DIR=value`
+* `--performance_sample_count=value`  →  `CM_MLPERF_LOADGEN_PERFORMANCE_SAMPLE_COUNT=value`
+* `--rerun=value`  →  `CM_RERUN=value`
+* `--results_repo=value`  →  `CM_MLPERF_INFERENCE_RESULTS_REPO=value`
+* `--scenario=value`  →  `CM_MLPERF_LOADGEN_SCENARIO=value`
+* `--server_target_qps=value`  →  `CM_MLPERF_LOADGEN_SERVER_TARGET_QPS=value`
+* `--singlestream_target_latency=value`  →  `CM_MLPERF_LOADGEN_SINGLESTREAM_TARGET_LATENCY=value`
+* `--skip_preprocess=value`  →  `CM_SKIP_PREPROCESS_DATASET=value`
+* `--skip_preprocessing=value`  →  `CM_SKIP_PREPROCESS_DATASET=value`
+* `--target_latency=value`  →  `CM_MLPERF_LOADGEN_TARGET_LATENCY=value`
+* `--target_qps=value`  →  `CM_MLPERF_LOADGEN_TARGET_QPS=value`
+* `--user_conf=value`  →  `CM_MLPERF_USER_CONF=value`
+
+**Above CLI flags can be used in the Python CM API as follows:**
+
+```python
+r=cm.access({... , "count":...})
+```
+
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_MLPERF_LOADGEN_SCENARIO: `Offline` +* CM_MLPERF_LOADGEN_MODE: `performance` +* CM_SKIP_PREPROCESS_DATASET: `no` +* CM_SKIP_MODEL_DOWNLOAD: `no` +* CM_MLPERF_SUT_NAME_IMPLEMENTATION_PREFIX: `dummy_harness` +* CM_MLPERF_SKIP_RUN: `no` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-dummy/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,mlcommons,inference,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,mlcommons,inference,loadgen + * CM names: `--adr.['inference-loadgen']...` + - CM script: [get-mlperf-inference-loadgen](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-loadgen) + * generate,user-conf,mlperf,inference + * CM names: `--adr.['user-conf-generator']...` + - CM script: [generate-mlperf-inference-user-conf](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-inference-user-conf) + * get,generic-python-lib,_mlperf_logging + * CM names: `--adr.['mlperf-logging']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,git,repo + * CM names: `--adr.inference-results inference-code...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-dummy/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-dummy/_cm.yaml) + 1. 
***Run native script if exists***
+     * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-dummy/run.sh)
+  1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-dummy/_cm.yaml)
+  1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-dummy/customize.py)***
+  1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-dummy/_cm.yaml)***
+     * benchmark-mlperf
+       * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_MLPERF_SKIP_RUN': ['yes', True]}` + * CM names: `--adr.['runner', 'mlperf-runner']...` + - CM script: [benchmark-program-mlperf](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program-mlperf) + * save,mlperf,inference,state + * CM names: `--adr.['save-mlperf-inference-state']...` + - CM script: [save-mlperf-inference-implementation-state](https://github.com/mlcommons/cm4mlops/tree/master/script/save-mlperf-inference-implementation-state) + +___ +### Script output +`cmr "reproduce mlcommons mlperf inference harness dummy-harness dummy [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_DATASET_*` +* `CM_HW_NAME` +* `CM_IMAGENET_ACCURACY_DTYPE` +* `CM_MAX_EXAMPLES` +* `CM_MLPERF_*` +* `CM_ML_MODEL_*` +* `CM_SQUAD_ACCURACY_DTYPE` +#### New environment keys auto-detected from customize diff --git a/docs/Modular-MLPerf-benchmarks/app-mlperf-inference-intel.md b/docs/Modular-MLPerf-benchmarks/app-mlperf-inference-intel.md new file mode 100644 index 0000000000..3278fbf447 --- /dev/null +++ b/docs/Modular-MLPerf-benchmarks/app-mlperf-inference-intel.md @@ -0,0 +1,621 @@ +Automatically generated README for this automation recipe: **app-mlperf-inference-intel** + +Category: **Modular MLPerf benchmarks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-mlperf-inference-intel,c05a90433bb04cc1) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-intel)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): 
*reproduce,mlcommons,mlperf,inference,harness,intel-harness,intel,intel-harness,intel* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "reproduce mlcommons mlperf inference harness intel-harness intel intel-harness intel" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=reproduce,mlcommons,mlperf,inference,harness,intel-harness,intel,intel-harness,intel` + +`cm run script --tags=reproduce,mlcommons,mlperf,inference,harness,intel-harness,intel,intel-harness,intel[,variations] [--input_flags]` + +*or* + +`cmr "reproduce mlcommons mlperf inference harness intel-harness intel intel-harness intel"` + +`cmr "reproduce mlcommons mlperf inference harness intel-harness intel intel-harness intel [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'reproduce,mlcommons,mlperf,inference,harness,intel-harness,intel,intel-harness,intel',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="reproduce,mlcommons,mlperf,inference,harness,intel-harness,intel,intel-harness,intel"``` + +#### Run this script via Docker (beta) + +`cm docker script "reproduce mlcommons mlperf inference harness intel-harness intel intel-harness intel[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *Internal group (variations should not be selected manually)* +
+ Click here to expand this section. + + * `_bert_` + - Environment variables: + - *CM_BENCHMARK*: `STANDALONE_BERT` + - *dataset_squad_tokenized_max_seq_length*: `384` + - *loadgen_buffer_size*: `10833` + - *loadgen_dataset_size*: `10833` + - Workflow: + * `_build-harness,bert_` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-sys-util,_rsync + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,dataset,original,squad + * CM names: `--adr.['squad-original']...` + - CM script: [get-dataset-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad) + * get,ml-model,bert-large,_pytorch,_int8 + * CM names: `--adr.['bert-large', 'ml-model']...` + - CM script: [get-ml-model-bert-large-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-large-squad) + * get,generic-python-lib,_package.tokenization + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_calibration,gptj_` + - Workflow: + * `_gptj_` + - Environment variables: + - *CM_BENCHMARK*: `STANDALONE_GPTJ` + - Workflow: + * `_int4,gptj_` + - Environment variables: + - *INTEL_GPTJ_INT4*: `yes` + - Workflow: + * `_int8,gptj_` + - Environment variables: + - *INTEL_GPTJ_INT4*: `no` + - Workflow: + +
+ + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_bert_,network-client` + - Environment variables: + - *CM_BENCHMARK*: `NETWORK_BERT_CLIENT` + - Workflow: + * `_bert_,network-server` + - Environment variables: + - *CM_BENCHMARK*: `NETWORK_BERT_SERVER` + - Workflow: + * `_bert_,pytorch` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,conda,_name.bert-pt + - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) + * install,llvm,src,_tag.llvmorg-15.0.7,_runtimes.libcxx:libcxxabi:openmp,_clang,_release,_for-intel-mlperf-inference-v3.1-bert + - CM script: [install-llvm-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-llvm-src) + * get,generic-sys-util,_libffi7 + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,conda-package,_package.python + * CM names: `--adr.['conda-package', 'python']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.ncurses,_source.conda-forge + * CM names: `--adr.['conda-package', 'ncurses']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic-sys-util,_numactl + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,conda-package,_package.jemalloc,_source.conda-forge + * CM names: `--adr.['conda-package', 'jemalloc']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,pytorch,from.src,_for-intel-mlperf-inference-v3.1-bert + - CM script: [install-pytorch-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pytorch-from-src) + * 
install,onednn,from.src,_for-intel-mlperf-inference-v3.1-bert + - CM script: [install-onednn-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-onednn-from-src) + * install,transformers,from.src,_for-intel-mlperf-inference-v3.1-bert + - CM script: [install-transformers-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-transformers-from-src) + * `_bs.#` + - Environment variables: + - *ML_MLPERF_MODEL_BATCH_SIZE*: `#` + - Workflow: + * `_gptj_,build-harness` + - Workflow: + * `_gptj_,pytorch` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,conda,_name.gptj-pt + - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) + * get,python,_conda.gptj-pt + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * install,llvm,src,_tag.llvmorg-16.0.6,_clang,_release,_for-intel-mlperf-inference-v3.1-gptj + - CM script: [install-llvm-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-llvm-src) + * get,generic,conda-package,_package.ncurses,_source.conda-forge + * CM names: `--adr.['conda-package', 'ncurses']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic-sys-util,_numactl + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,conda-package,_package.jemalloc,_source.conda-forge + * CM names: `--adr.['conda-package', 'jemalloc']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * install,ipex,from.src,_for-intel-mlperf-inference-v3.1-gptj + - CM script: [install-ipex-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-ipex-from-src) + * get,generic,conda-package,_package.ninja + * Enable this dependency only if all ENV vars 
are set:
+`{'INTEL_GPTJ_INT4': ['yes']}` + * CM names: `--adr.['conda-package', 'ninja']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * install,tpp-pex,from.src,_for-intel-mlperf-inference-v3.1-gptj + * Enable this dependency only if all ENV vars are set:
+`{'INTEL_GPTJ_INT4': ['yes']}` + - CM script: [install-tpp-pytorch-extension](https://github.com/mlcommons/cm4mlops/tree/master/script/install-tpp-pytorch-extension) + * get,generic-python-lib,_package.transformers + * CM names: `--adr.['pip-package', 'transformers']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,mlcommons,inference,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,mlcommons,inference,loadgen,_custom-python + * CM names: `--adr.['inference-loadgen']...` + - CM script: [get-mlperf-inference-loadgen](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-loadgen) + * get,ml-model,large-language-model,gptj + * CM names: `--adr.['ml-model', 'gptj-model', 'gpt-j-model']...` + - CM script: [get-ml-model-gptj](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-gptj) + * get,generic-python-lib,_package.datasets + * CM names: `--adr.['pip-package', 'datasets']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.accelerate + * CM names: `--adr.['pip-package', 'accelerate']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_custom-python,_package.torch,_url.git+https://github.com/pytorch/pytorch.git@927dc662386af052018212c7d01309a506fc94cd + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_int4,gptj_,build-harness` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * reproduce,mlperf,inference,intel,harness,_calibration + * CM names: `--adr.['calibration']...` + - CM script: [app-mlperf-inference-intel](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-intel) + * get,generic-python-lib,_package.optimum + * CM names: `--adr.['pip-package', 'optimum']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_resnet50,uint8` + - Environment variables: + - *CM_IMAGENET_ACCURACY_DTYPE*: `int8` + - Workflow: + * `_sapphire-rapids.112c,gptj-99,offline,int4` + - Environment variables: + - *NUM_PROC*: `4` + - *KMP_BLOCKTIME*: `1` + - *WORKERS_PER_PROC*: `3` + - Workflow: + * `_sapphire-rapids.112c,gptj-99,offline,int8` + - Environment variables: + - *KMP_BLOCKTIME*: `1` + - *WORKERS_PER_PROC*: `2` + - Workflow: + * `_sapphire-rapids.112c,gptj-99,server,int4` + - Environment variables: + - *KMP_BLOCKTIME*: `1` + - *WORKERS_PER_PROC*: `4` + - Workflow: + * `_sapphire-rapids.112c,gptj-99,server,int8` + - Environment variables: + - *KMP_BLOCKTIME*: `1` + - *WORKERS_PER_PROC*: `2` + - Workflow: + * `_sapphire-rapids.24c,bert-99` + - Environment variables: + - *WORKERS_PER_PROC*: `1` + - Workflow: + * `_sapphire-rapids.24c,gptj-99,offline,int4` + - Environment variables: + - *KMP_BLOCKTIME*: `10` + - *WORKERS_PER_PROC*: `1` + - Workflow: + * `_sapphire-rapids.24c,gptj-99,offline,int8` + - Environment variables: + - *KMP_BLOCKTIME*: `10` + - *WORKERS_PER_PROC*: `1` + - Workflow: + +
+ + + * Group "**device**" +
+ Click here to expand this section. + + * **`_cpu`** (default) + - Environment variables: + - *CM_MLPERF_DEVICE*: `cpu` + - Workflow: + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * **`_pytorch`** (default) + - Environment variables: + - *CM_MLPERF_BACKEND*: `pytorch` + - *CM_MLPERF_BACKEND_LIB_NAMESPEC*: `pytorch` + - Workflow: + +
+ + + * Group "**loadgen-batchsize**" +
+ Click here to expand this section. + + * `_batch_size.#` + - Environment variables: + - *CM_MLPERF_LOADGEN_BATCH_SIZE*: `#` + - Workflow: + +
+ + + * Group "**loadgen-scenario**" +
+ Click here to expand this section. + + * `_multistream` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `MultiStream` + - Workflow: + * `_offline` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `Offline` + - Workflow: + * `_server` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `Server` + - Workflow: + * `_singlestream` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `SingleStream` + - Workflow: + +
+ + + * Group "**model**" +
+ Click here to expand this section. + + * `_bert-99` + - Environment variables: + - *CM_MODEL*: `bert-99` + - *CM_SQUAD_ACCURACY_DTYPE*: `float32` + - *CM_NOT_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/3750364/files/bert_large_v1_1_fake_quant.onnx` + - Workflow: + * `_bert-99.9` + - Environment variables: + - *CM_MODEL*: `bert-99.9` + - *CM_NOT_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/3733910/files/model.onnx` + - Workflow: + * `_gptj-99` + - Environment variables: + - *CM_MODEL*: `gptj-99` + - *CM_NOT_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/3733910/files/model.onnx` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int8` + - Workflow: + * `_gptj-99.9` + - Environment variables: + - *CM_MODEL*: `gptj-99.9` + - *CM_NOT_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/3733910/files/model.onnx` + - Workflow: + * **`_resnet50`** (default) + - Environment variables: + - *CM_MODEL*: `resnet50` + - *dataset_imagenet_preprocessed_input_square_side*: `224` + - *ml_model_has_background_class*: `YES` + - *ml_model_image_height*: `224` + - *loadgen_buffer_size*: `1024` + - *loadgen_dataset_size*: `50000` + - *CM_BENCHMARK*: `STANDALONE_CLASSIFICATION` + - Workflow: + * `_retinanet` + - Environment variables: + - *CM_MODEL*: `retinanet` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/6617981/files/resnext50_32x4d_fpn.pth` + - *dataset_imagenet_preprocessed_input_square_side*: `224` + - *ml_model_image_height*: `800` + - *ml_model_image_width*: `800` + - *loadgen_buffer_size*: `64` + - *loadgen_dataset_size*: `24576` + - *CM_BENCHMARK*: `STANDALONE_OBJECT_DETECTION` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_numpy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + +
+ + + * Group "**network-mode**" +
+ Click here to expand this section. + + * `_network-server` + - Environment variables: + - *CM_MLPERF_NETWORK_RUN_MODE*: `network-server` + - Workflow: + * **`_standalone`** (default) + - Environment variables: + - *CM_MLPERF_NETWORK_RUN_MODE*: `standalone` + - Workflow: + +
+ + + * Group "**network-run-mode**" +
+ Click here to expand this section. + + * `_network-client` + - Environment variables: + - *CM_MLPERF_NETWORK_RUN_MODE*: `network-client` + - Workflow: + +
+ + + * Group "**power-mode**" +
+ Click here to expand this section. + + * `_maxn` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_MAXN*: `True` + - Workflow: + * `_maxq` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_MAXQ*: `True` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * `_fp32` + - Environment variables: + - *CM_IMAGENET_ACCURACY_DTYPE*: `float32` + - Workflow: + * `_int4` + - Workflow: + * `_uint8` + - Workflow: + +
+ + + * Group "**run-mode**" +
+ Click here to expand this section. + + * `_build-harness` + - Environment variables: + - *CM_LOCAL_MLPERF_INFERENCE_INTEL_RUN_MODE*: `build_harness` + - Workflow: + * `_calibration` + - Environment variables: + - *CM_LOCAL_MLPERF_INFERENCE_INTEL_RUN_MODE*: `calibration` + - Workflow: + * **`_run-harness`** (default) + - Environment variables: + - *CM_LOCAL_MLPERF_INFERENCE_INTEL_RUN_MODE*: `run_harness` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * reproduce,mlperf,inference,intel,harness,_build-harness + * CM names: `--adr.['build-harness']...` + - CM script: [app-mlperf-inference-intel](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-intel) + * get,mlcommons,inference,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * generate,user-conf,mlperf,inference + * CM names: `--adr.['user-conf-generator']...` + - CM script: [generate-mlperf-inference-user-conf](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-inference-user-conf) + +
+ + + * Group "**sut**" +
+ Click here to expand this section. + + * `_sapphire-rapids.112c` + - Environment variables: + - *WARMUP*: ` --warmup` + - Workflow: + * `_sapphire-rapids.24c` + - Workflow: + +
+ + +#### Default variations + +`_cpu,_pytorch,_resnet50,_run-harness,_standalone` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--count=value` → `CM_MLPERF_LOADGEN_QUERY_COUNT=value` +* `--max_batchsize=value` → `CM_MLPERF_LOADGEN_MAX_BATCHSIZE=value` +* `--mlperf_conf=value` → `CM_MLPERF_CONF=value` +* `--mode=value` → `CM_MLPERF_LOADGEN_MODE=value` +* `--multistream_target_latency=value` → `CM_MLPERF_LOADGEN_MULTISTREAM_TARGET_LATENCY=value` +* `--offline_target_qps=value` → `CM_MLPERF_LOADGEN_OFFLINE_TARGET_QPS=value` +* `--output_dir=value` → `CM_MLPERF_OUTPUT_DIR=value` +* `--performance_sample_count=value` → `CM_MLPERF_LOADGEN_PERFORMANCE_SAMPLE_COUNT=value` +* `--rerun=value` → `CM_RERUN=value` +* `--scenario=value` → `CM_MLPERF_LOADGEN_SCENARIO=value` +* `--server_target_qps=value` → `CM_MLPERF_LOADGEN_SERVER_TARGET_QPS=value` +* `--singlestream_target_latency=value` → `CM_MLPERF_LOADGEN_SINGLESTREAM_TARGET_LATENCY=value` +* `--skip_preprocess=value` → `CM_SKIP_PREPROCESS_DATASET=value` +* `--skip_preprocessing=value` → `CM_SKIP_PREPROCESS_DATASET=value` +* `--target_latency=value` → `CM_MLPERF_LOADGEN_TARGET_LATENCY=value` +* `--target_qps=value` → `CM_MLPERF_LOADGEN_TARGET_QPS=value` +* `--user_conf=value` → `CM_MLPERF_USER_CONF=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "count":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_BATCH_COUNT: `1` +* CM_BATCH_SIZE: `1` +* CM_FAST_COMPILATION: `yes` +* CM_MLPERF_LOADGEN_SCENARIO: `Offline` +* CM_MLPERF_LOADGEN_MODE: `performance` +* CM_SKIP_PREPROCESS_DATASET: `no` +* CM_SKIP_MODEL_DOWNLOAD: `no` +* CM_MLPERF_SUT_NAME_IMPLEMENTATION_PREFIX: `intel` +* CM_MLPERF_SKIP_RUN: `no` +* verbosity: `1` +* loadgen_trigger_cold_run: `0` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-intel/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,generic-python-lib,_mlperf_logging + * CM names: `--adr.['mlperf-logging']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,ml-model,resnet50,_fp32,_onnx,_from-tf + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['resnet50']}` + * CM names: `--adr.['resnet50-model', 'ml-model']...` + - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) + * compile,intel,model,_resnet50 + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['resnet50']}` + * CM names: `--adr.['resnet50-compiler']...` + - *Warning: no scripts found* + * get,dataset,imagenet,preprocessed,_for.resnet50,_NHWC,_full + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['resnet50']}` + * CM names: `--adr.['imagenet-preprocessed', 'dataset-preprocessed']...` + - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) + * compile,intel,model,_retinanet + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet']}` + * CM names: `--adr.['retinanet-compiler']...` + - *Warning: no scripts found* + * get,dataset,preprocessed,openimages,_for.retinanet.onnx,_NCHW,_validation,_custom-annotations + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet']}` + * CM names: `--adr.['openimages-preprocessed', 'dataset-preprocessed']...` + - CM script: [get-preprocessed-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-openimages) + * get,mlperf,inference,results,_ctuning + * CM names: `--adr.inference-results...` + - CM script: [get-mlperf-inference-results](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-results) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-intel/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-intel/_cm.yaml) + 1. ***Run native script if exists*** + * [run_bert_harness.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-intel/run_bert_harness.sh) + * [run_gptj_harness.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-intel/run_gptj_harness.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-intel/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-intel/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-intel/_cm.yaml)*** + * benchmark-mlperf + * Enable this dependency only if all ENV vars are set:
+`{'CM_LOCAL_MLPERF_INFERENCE_INTEL_RUN_MODE': ['run_harness']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_SKIP_RUN': ['yes', True]}` + * CM names: `--adr.['runner', 'mlperf-runner']...` + - CM script: [benchmark-program-mlperf](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program-mlperf) + * save,mlperf,inference,state + * CM names: `--adr.['save-mlperf-inference-state']...` + - CM script: [save-mlperf-inference-implementation-state](https://github.com/mlcommons/cm4mlops/tree/master/script/save-mlperf-inference-implementation-state) + +___ +### Script output +`cmr "reproduce mlcommons mlperf inference harness intel-harness intel intel-harness intel [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Modular-MLPerf-benchmarks/app-mlperf-inference-qualcomm.md b/docs/Modular-MLPerf-benchmarks/app-mlperf-inference-qualcomm.md new file mode 100644 index 0000000000..6205c2108c --- /dev/null +++ b/docs/Modular-MLPerf-benchmarks/app-mlperf-inference-qualcomm.md @@ -0,0 +1,775 @@ +Automatically generated README for this automation recipe: **app-mlperf-inference-qualcomm** + +Category: **Modular MLPerf benchmarks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-mlperf-inference-qualcomm,eef1aca5d7c0470e) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-qualcomm)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *reproduce,mlcommons,mlperf,inference,harness,qualcomm-harness,qualcomm,kilt-harness,kilt* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "reproduce mlcommons mlperf inference harness qualcomm-harness qualcomm kilt-harness kilt" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=reproduce,mlcommons,mlperf,inference,harness,qualcomm-harness,qualcomm,kilt-harness,kilt` + +`cm run script --tags=reproduce,mlcommons,mlperf,inference,harness,qualcomm-harness,qualcomm,kilt-harness,kilt[,variations] [--input_flags]` + +*or* + +`cmr "reproduce mlcommons mlperf inference harness qualcomm-harness qualcomm kilt-harness kilt"` + +`cmr "reproduce mlcommons mlperf inference harness qualcomm-harness qualcomm kilt-harness kilt [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'reproduce,mlcommons,mlperf,inference,harness,qualcomm-harness,qualcomm,kilt-harness,kilt' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="reproduce,mlcommons,mlperf,inference,harness,qualcomm-harness,qualcomm,kilt-harness,kilt"``` + +#### Run this script via Docker (beta) + +`cm docker script "reproduce mlcommons mlperf inference harness qualcomm-harness qualcomm kilt-harness kilt[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *Internal group (variations should not be selected manually)* +
+ Click here to expand this section. + + * `_bert_` + - Environment variables: + - *CM_BENCHMARK*: `STANDALONE_BERT` + - *kilt_model_name*: `bert` + - *kilt_model_seq_length*: `384` + - *kilt_model_bert_variant*: `BERT_PACKED` + - *kilt_input_format*: `INT64,1,384:INT64,1,8:INT64,1,384:INT64,1,384` + - *kilt_output_format*: `FLOAT32,1,384:FLOAT32,1,384` + - *dataset_squad_tokenized_max_seq_length*: `384` + - *loadgen_buffer_size*: `10833` + - *loadgen_dataset_size*: `10833` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_transformers + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_safetensors + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_onnx + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + +
+ + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_activation-count.#` + - Environment variables: + - *CM_MLPERF_QAIC_ACTIVATION_COUNT*: `#` + - Workflow: + * `_bert-99,offline` + - Workflow: + * `_bert-99,qaic` + - Environment variables: + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `https://github.com/mlcommons/inference_results_v3.1/blob/main/closed/Qualcomm/calibration.md` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int32` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int8,fp16` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * compile,qaic,model,_bert-99,_pc.99.9980 + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_MLPERF_SKIP_RUN': [True]}` + * CM names: `--adr.['qaic-model-compiler', 'bert-99-compiler']...` + - CM script: [compile-model-for.qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/compile-model-for.qaic) + * `_bert-99.9,offline` + - Workflow: + * `_bert-99.9,qaic` + - Environment variables: + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `https://github.com/mlcommons/inference_results_v3.1/blob/main/closed/Qualcomm/calibration.md` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int32` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp16` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * compile,qaic,model,_bert-99.9 + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_SKIP_RUN': [True]}` + * CM names: `--adr.['qaic-model-compiler', 'bert-99.9-compiler']...` + - CM script: [compile-model-for.qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/compile-model-for.qaic) + * `_bert_,network-client` + - Environment variables: + - *CM_BENCHMARK*: `NETWORK_BERT_CLIENT` + - Workflow: + * `_bert_,network-server` + - Environment variables: + - *CM_BENCHMARK*: `NETWORK_BERT_SERVER` + - Workflow: + * `_bert_,qaic` + - Environment variables: + - *kilt_model_batch_size*: `1` + - *kilt_input_format*: `UINT32,1,384:UINT32,1,8:UINT32,1,384:UINT32,1,384` + - *kilt_input_formata*: `UINT32,1,384:UINT32,1,384:UINT32,1,384` + - *kilt_output_formatia*: `UINT8,1,384:UINT8,1,384` + - *kilt_device_qaic_skip_stage*: `convert` + - Workflow: + * `_bert_,singlestream` + - Environment variables: + - *kilt_model_batch_size*: `1` + - Workflow: + * `_dl2q.24xlarge,bert-99,offline` + - Environment variables: + - *qaic_activation_count*: `14` + - Workflow: + * `_dl2q.24xlarge,bert-99.9,offline` + - Environment variables: + - *qaic_activation_count*: `7` + - Workflow: + * `_dl2q.24xlarge,bert-99.9,server` + - Environment variables: + - *qaic_activation_count*: `7` + - Workflow: + * `_dl2q.24xlarge,resnet50,multistream` + - Environment variables: + - *qaic_activation_count*: `1` + - Workflow: + * `_dl2q.24xlarge,resnet50,offline` + - Environment variables: + - *qaic_activation_count*: `3` + - Workflow: + * `_dl2q.24xlarge,resnet50,server` + - Environment variables: + - *qaic_activation_count*: `3` + - Workflow: + * `_dl2q.24xlarge,retinanet,offline` + - Environment variables: + - *qaic_activation_count*: `14` + - Workflow: + * `_dl2q.24xlarge,retinanet,server` + - Environment variables: + - *qaic_activation_count*: `14` + - Workflow: + * `_dl2q.24xlarge,singlestream` + - Environment variables: + - *CM_QAIC_DEVICES*: `0` + - *qaic_activation_count*: `1` + - Workflow: + * `_num-devices.4` + - Environment variables: + - *CM_QAIC_DEVICES*: 
`0,1,2,3` + - Workflow: + * `_pro` + - Environment variables: + - *qaic_queue_length*: `10` + - Workflow: + * `_pro,num-devices.4,bert-99,offline` + - Environment variables: + - *qaic_activation_count*: `16` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * set,device,qaic,_vc.15 + - CM script: [set-device-settings-qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/set-device-settings-qaic) + * `_pro,num-devices.4,bert-99,server` + - Environment variables: + - *qaic_activation_count*: `16` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * set,device,qaic,_vc.13 + - CM script: [set-device-settings-qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/set-device-settings-qaic) + * `_pro,num-devices.4,bert-99.9,offline` + - Environment variables: + - *qaic_activation_count*: `8` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * set,device,qaic,_vc.13 + - CM script: [set-device-settings-qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/set-device-settings-qaic) + * `_pro,num-devices.4,bert-99.9,server` + - Environment variables: + - *qaic_activation_count*: `8` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * set,device,qaic,_vc.13 + - CM script: [set-device-settings-qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/set-device-settings-qaic) + * `_pro,num-devices.4,resnet50,offline` + - Environment variables: + - *qaic_activation_count*: `4` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * set,device,qaic,_vc.16 + - CM script: [set-device-settings-qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/set-device-settings-qaic) + * `_pro,num-devices.4,resnet50,server` + - Environment variables: + - *qaic_activation_count*: `4` + - Workflow: + * `_pro,num-devices.4,retinanet,offline` + - Environment variables: + - *qaic_activation_count*: `16` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * set,device,qaic,_vc.17 + - CM script: [set-device-settings-qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/set-device-settings-qaic) + * `_pro,num-devices.4,retinanet,server` + - Environment variables: + - *qaic_activation_count*: `16` + - Workflow: + * `_pro,num-devices.4,singlestream` + - Environment variables: + - *CM_QAIC_DEVICES*: `0` + - *qaic_activation_count*: `1` + - Workflow: + * `_rb6,bert-99,offline` + - Environment variables: + - *qaic_activation_count*: `9` + - Workflow: + * `_rb6,resnet50,multistream` + - Environment variables: + - *qaic_activation_count*: `2` + - Workflow: + * `_rb6,resnet50,offline` + - Environment variables: + - *qaic_activation_count*: `2` + - Workflow: + * `_rb6,retinanet,multistream` + - Environment variables: + - *qaic_activation_count*: `8` + - Workflow: + * `_rb6,retinanet,offline` + - Environment variables: + - *qaic_activation_count*: `9` + - Workflow: + * `_rb6,singlestream` + - Environment variables: + - *qaic_activation_count*: `1` + - Workflow: + * `_resnet50,uint8` + - Environment variables: + - *kilt_input_format*: `UINT8,-1,224,224,3` + - *kilt_device_qaic_skip_stage*: `convert` + - *CM_IMAGENET_ACCURACY_DTYPE*: `int8` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `https://github.com/mlcommons/inference_results_v3.1/blob/main/closed/Qualcomm/calibration.md` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int8` + - Workflow: + * `_retinanet,qaic,uint8` + - Environment variables: + - *kilt_device_qaic_skip_stage*: `convert` + - *kilt_input_format*: `UINT8,1,3,800,800` + - *kilt_output_format*: `INT8,1,1000:INT8,1,1000:INT8,1,1000:INT8,1,1000:INT8,1,1000:INT8,1,1000:INT8,1,1000:INT8,1,1000:INT8,1,1000:INT8,1,1000:INT8,1,4,1000:INT8,14,1000:INT8,1,4,1000:INT8,1,4,1000:INT8,1,4,1000` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `https://github.com/mlcommons/inference_results_v3.1/blob/main/closed/Qualcomm/calibration.md` + - 
*CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int8` + - Workflow: + * `_singlestream,resnet50` + - Workflow: + * `_singlestream,retinanet` + - Workflow: + +
+ + + * Group "**batch-size**" +
+ Click here to expand this section. + + * `_bs.#` + - Environment variables: + - *kilt_model_batch_size*: `#` + - Workflow: + * `_bs.0` + - Environment variables: + - *kilt_model_batch_size*: `1` + - Workflow: + +
+ + + * Group "**device**" +
+ Click here to expand this section. + + * **`_cpu`** (default) + - Environment variables: + - *CM_MLPERF_DEVICE*: `cpu` + - *kilt_backend_type*: `cpu` + - Workflow: + * `_cuda` + - Environment variables: + - *CM_MLPERF_DEVICE*: `gpu` + - *CM_MLPERF_DEVICE_LIB_NAMESPEC*: `cudart` + - *kilt_backend_type*: `gpu` + - Workflow: + * `_qaic` + - Environment variables: + - *CM_MLPERF_DEVICE*: `qaic` + - *CM_MLPERF_DEVICE_LIB_NAMESPEC*: `QAic` + - *kilt_backend_type*: `qaic` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,qaic,platform,sdk + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_MLPERF_SKIP_RUN': [True]}` + - CM script: [get-qaic-platform-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-qaic-platform-sdk) + * get,lib,protobuf,_tag.v3.11.4 + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_SKIP_RUN': [True]}` + - CM script: [get-lib-protobuf](https://github.com/mlcommons/cm4mlops/tree/master/script/get-lib-protobuf) + * set,device,mode,qaic + * Enable this dependency only if all ENV vars are set:
+`{'CM_QAIC_VC': 'on'}` + - CM script: [set-device-settings-qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/set-device-settings-qaic) + * set,device,mode,qaic,_ecc + * Enable this dependency only if all ENV vars are set:
+`{'CM_QAIC_ECC': 'yes'}` + - CM script: [set-device-settings-qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/set-device-settings-qaic) + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * `_glow` + - Environment variables: + - *device*: `qaic` + - *CM_MLPERF_BACKEND*: `glow` + - *CM_MLPERF_BACKEND_LIB_NAMESPEC*: `QAic` + - Workflow: + * **`_onnxruntime`** (default) + - Environment variables: + - *device*: `onnxrt` + - *CM_MLPERF_BACKEND*: `onnxruntime` + - *CM_MLPERF_BACKEND_LIB_NAMESPEC*: `onnxruntime` + - Workflow: + * `_tensorrt` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tensorrt` + - *device*: `tensorrt` + - *CM_MLPERF_BACKEND_NAME*: `TensorRT` + - Workflow: + +
+ + + * Group "**loadgen-batch-size**" +
+ Click here to expand this section. + + * `_loadgen-batch-size.#` + - Environment variables: + - *CM_MLPERF_LOADGEN_BATCH_SIZE*: `#` + - Workflow: + +
+ + + * Group "**loadgen-scenario**" +
+ Click here to expand this section. + + * `_multistream` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `MultiStream` + - Workflow: + * `_offline` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `Offline` + - Workflow: + * `_server` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `Server` + - Workflow: + * `_singlestream` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `SingleStream` + - Workflow: + +
+ + + * Group "**model**" +
+ Click here to expand this section. + + * `_bert-99` + - Environment variables: + - *CM_MODEL*: `bert-99` + - *CM_SQUAD_ACCURACY_DTYPE*: `float32` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/3750364/files/bert_large_v1_1_fake_quant.onnx` + - Workflow: + * `_bert-99.9` + - Environment variables: + - *CM_MODEL*: `bert-99.9` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/3733910/files/model.onnx` + - Workflow: + * **`_resnet50`** (default) + - Environment variables: + - *CM_MODEL*: `resnet50` + - *kilt_model_name*: `resnet50` + - *kilt_input_count*: `1` + - *kilt_output_count*: `1` + - *kilt_input_format*: `FLOAT32,-1,224,224,3` + - *kilt_output_format*: `INT64,-1` + - *dataset_imagenet_preprocessed_input_square_side*: `224` + - *ml_model_has_background_class*: `YES` + - *ml_model_image_height*: `224` + - *loadgen_buffer_size*: `1024` + - *loadgen_dataset_size*: `50000` + - *CM_BENCHMARK*: `STANDALONE_CLASSIFICATION` + - Workflow: + * `_retinanet` + - Environment variables: + - *CM_MODEL*: `retinanet` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/6617981/files/resnext50_32x4d_fpn.pth` + - *kilt_model_name*: `retinanet` + - *kilt_input_count*: `1` + - *kilt_model_max_detections*: `600` + - *kilt_output_count*: `1` + - *kilt_input_format*: `FLOAT32,-1,3,800,800` + - *kilt_output_format*: `INT64,-1` + - *dataset_imagenet_preprocessed_input_square_side*: `224` + - *ml_model_image_height*: `800` + - *ml_model_image_width*: `800` + - *loadgen_buffer_size*: `64` + - *loadgen_dataset_size*: `24576` + - *CM_BENCHMARK*: `STANDALONE_OBJECT_DETECTION` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,generic-python-lib,_Pillow + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torch + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torchvision + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_opencv-python + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_numpy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_pycocotools + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + +
+ + + * Group "**nsp**" +
+ Click here to expand this section. + + * `_nsp.#` + - Workflow: + * `_nsp.14` + - Workflow: + * `_nsp.16` + - Workflow: + +
+ + + * Group "**power-mode**" +
+ Click here to expand this section. + + * `_maxn` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_MAXN*: `True` + - Workflow: + * `_maxq` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_MAXQ*: `True` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * `_fp16` + - Workflow: + * `_fp32` + - Environment variables: + - *CM_IMAGENET_ACCURACY_DTYPE*: `float32` + - Workflow: + * `_uint8` + - Workflow: + +
+ + + * Group "**run-mode**" +
+ Click here to expand this section. + + * `_network-client` + - Environment variables: + - *CM_RUN_MODE*: `network-client` + - Workflow: + * `_network-server` + - Environment variables: + - *CM_RUN_MODE*: `network-server` + - Workflow: + * **`_standalone`** (default) + - Environment variables: + - *CM_RUN_MODE*: `standalone` + - Workflow: + +
+ + + * Group "**sut**" +
+ Click here to expand this section. + + * `_dl2q.24xlarge` + - Environment variables: + - *CM_QAIC_DEVICES*: `0,1,2,3,4,5,6,7` + - *qaic_queue_length*: `4` + - Workflow: + * `_rb6` + - Environment variables: + - *CM_QAIC_DEVICES*: `0` + - *qaic_queue_length*: `6` + - Workflow: + +
+ + +#### Default variations + +`_cpu,_onnxruntime,_resnet50,_standalone` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--count=value` → `CM_MLPERF_LOADGEN_QUERY_COUNT=value` +* `--devices=value` → `CM_QAIC_DEVICES=value` +* `--max_batchsize=value` → `CM_MLPERF_LOADGEN_MAX_BATCHSIZE=value` +* `--mlperf_conf=value` → `CM_MLPERF_CONF=value` +* `--mode=value` → `CM_MLPERF_LOADGEN_MODE=value` +* `--multistream_target_latency=value` → `CM_MLPERF_LOADGEN_MULTISTREAM_TARGET_LATENCY=value` +* `--offline_target_qps=value` → `CM_MLPERF_LOADGEN_OFFLINE_TARGET_QPS=value` +* `--output_dir=value` → `CM_MLPERF_OUTPUT_DIR=value` +* `--performance_sample_count=value` → `CM_MLPERF_LOADGEN_PERFORMANCE_SAMPLE_COUNT=value` +* `--rerun=value` → `CM_RERUN=value` +* `--scenario=value` → `CM_MLPERF_LOADGEN_SCENARIO=value` +* `--server_target_qps=value` → `CM_MLPERF_LOADGEN_SERVER_TARGET_QPS=value` +* `--singlestream_target_latency=value` → `CM_MLPERF_LOADGEN_SINGLESTREAM_TARGET_LATENCY=value` +* `--skip_preprocess=value` → `CM_SKIP_PREPROCESS_DATASET=value` +* `--skip_preprocessing=value` → `CM_SKIP_PREPROCESS_DATASET=value` +* `--target_latency=value` → `CM_MLPERF_LOADGEN_TARGET_LATENCY=value` +* `--target_qps=value` → `CM_MLPERF_LOADGEN_TARGET_QPS=value` +* `--user_conf=value` → `CM_MLPERF_USER_CONF=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "count":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_BATCH_COUNT: `1` +* CM_BATCH_SIZE: `1` +* CM_FAST_COMPILATION: `yes` +* CM_MLPERF_LOADGEN_SCENARIO: `Offline` +* CM_MLPERF_LOADGEN_MODE: `performance` +* CM_SKIP_PREPROCESS_DATASET: `no` +* CM_SKIP_MODEL_DOWNLOAD: `no` +* CM_MLPERF_SUT_NAME_IMPLEMENTATION_PREFIX: `kilt` +* CM_MLPERF_SKIP_RUN: `no` +* CM_KILT_REPO_URL: `https://github.com/GATEOverflow/kilt-mlperf` +* CM_QAIC_DEVICES: `0` +* kilt_max_wait_abs: `10000` +* verbosity: `0` +* loadgen_trigger_cold_run: `0` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-qualcomm/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,git,repo + * CM names: `--adr.['kilt-repo']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + * get,mlcommons,inference,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,mlcommons,inference,loadgen + * CM names: `--adr.['inference-loadgen']...` + - CM script: [get-mlperf-inference-loadgen](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-loadgen) + * generate,user-conf,mlperf,inference + * CM names: `--adr.['user-conf-generator']...` + - CM script: [generate-mlperf-inference-user-conf](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-inference-user-conf) + * get,generic-python-lib,_mlperf_logging + * CM names: `--adr.['mlperf-logging']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,ml-model,resnet50,_fp32,_onnx,_from-tf + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['resnet50']}` + * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_MLPERF_DEVICE': ['qaic']}` + * CM names: `--adr.['resnet50-model', 'ml-model']...` + - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) + * compile,qaic,model,_resnet50 + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['resnet50'], 'CM_MLPERF_DEVICE': ['qaic']}` + * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_MLPERF_SKIP_RUN': [True]}` + * CM names: `--adr.['qaic-model-compiler', 'resnet50-compiler']...` + - CM script: [compile-model-for.qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/compile-model-for.qaic) + * get,dataset,imagenet,preprocessed,_for.resnet50,_NHWC,_full + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['resnet50']}` + * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_MLPERF_SKIP_RUN': [True]}` + * CM names: `--adr.['imagenet-preprocessed', 'dataset-preprocessed']...` + - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) + * get,squad-vocab + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['bert-99', 'bert-99.9']}` + * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_MLPERF_SKIP_RUN': [True]}` + * CM names: `--adr.['bert-vocab']...` + - CM script: [get-dataset-squad-vocab](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad-vocab) + * get,dataset,tokenized,squad,_raw + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['bert-99', 'bert-99.9']}` + * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_MLPERF_SKIP_RUN': [True]}` + * CM names: `--adr.['squad-tokenized']...` + - CM script: [get-preprocessed-dataset-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-squad) + * compile,qaic,model,_retinanet + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet'], 'CM_MLPERF_DEVICE': ['qaic']}` + * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_MLPERF_SKIP_RUN': [True]}` + * CM names: `--adr.['qaic-model-compiler', 'retinanet-compiler']...` + - CM script: [compile-model-for.qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/compile-model-for.qaic) + * get,dataset,preprocessed,openimages,_for.retinanet.onnx,_NCHW,_validation,_custom-annotations + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet']}` + * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_MLPERF_SKIP_RUN': [True]}` + * CM names: `--adr.['openimages-preprocessed', 'dataset-preprocessed']...` + - CM script: [get-preprocessed-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-openimages) + * get,lib,onnxruntime,lang-cpp,_cpu + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['onnxruntime'], 'CM_MLPERF_DEVICE': ['cpu']}` + - CM script: [get-onnxruntime-prebuilt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-onnxruntime-prebuilt) + * get,lib,onnxruntime,lang-cpp,_cuda + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['onnxruntime'], 'CM_MLPERF_DEVICE': ['gpu']}` + - CM script: [get-onnxruntime-prebuilt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-onnxruntime-prebuilt) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-qualcomm/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-qualcomm/_cm.yaml) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-qualcomm/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-qualcomm/_cm.yaml) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-qualcomm/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-qualcomm/_cm.yaml)*** + * compile,cpp-program + * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_MLPERF_SKIP_RUN': [True]}` + * CM names: `--adr.['compile-program']...` + - CM script: [compile-program](https://github.com/mlcommons/cm4mlops/tree/master/script/compile-program) + * benchmark-mlperf + * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_MLPERF_SKIP_RUN': ['yes', True]}` + * CM names: `--adr.['runner', 'mlperf-runner']...` + - CM script: [benchmark-program-mlperf](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program-mlperf) + * save,mlperf,inference,state + * CM names: `--adr.['save-mlperf-inference-state']...` + - CM script: [save-mlperf-inference-implementation-state](https://github.com/mlcommons/cm4mlops/tree/master/script/save-mlperf-inference-implementation-state) + +___ +### Script output +`cmr "reproduce mlcommons mlperf inference harness qualcomm-harness qualcomm kilt-harness kilt [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_DATASET_*` +* `CM_HW_NAME` +* `CM_IMAGENET_ACCURACY_DTYPE` +* `CM_MAX_EXAMPLES` +* `CM_MLPERF_*` +* `CM_ML_MODEL_*` +* `CM_SQUAD_ACCURACY_DTYPE` +#### New environment keys auto-detected from customize + +* `CM_DATASET_LIST` +* `CM_MLPERF_CONF` +* `CM_MLPERF_DEVICE` +* `CM_MLPERF_USER_CONF` \ No newline at end of file diff --git a/docs/Modular-MLPerf-inference-benchmark-pipeline/app-loadgen-generic-python.md b/docs/Modular-MLPerf-inference-benchmark-pipeline/app-loadgen-generic-python.md new file mode 100644 index 0000000000..d4b87036eb --- /dev/null +++ b/docs/Modular-MLPerf-inference-benchmark-pipeline/app-loadgen-generic-python.md @@ -0,0 +1,331 @@ +Automatically generated README for this automation recipe: **app-loadgen-generic-python** + +Category: **Modular MLPerf inference benchmark pipeline** + +License: **Apache 2.0** + +Developers: [Gaz Iqbal](https://www.linkedin.com/in/gaziqbal), [Arjun Suresh](https://www.linkedin.com/in/arjunsuresh), [Grigori Fursin](https://cKnowledge.org/gfursin) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-loadgen-generic-python,d3d949cc361747a6) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-loadgen-generic-python)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *python,app,generic,loadgen* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "python app generic loadgen" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=python,app,generic,loadgen` + +`cm run script --tags=python,app,generic,loadgen[,variations] [--input_flags]` + +*or* + +`cmr "python app generic loadgen"` + +`cmr "python app generic loadgen [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + + +#### Input Flags + +* --**modelpath**=Full path to file with model weights +* --**modelcodepath**=(for PyTorch models) Full path to file with model code and cmc.py +* --**modelcfgpath**=(for PyTorch models) Full path to JSON file with model cfg +* --**modelsamplepath**=(for PyTorch models) Full path to file with model sample in pickle format +* --**ep**=ONNX Execution provider +* --**scenario**=MLPerf LoadGen scenario +* --**samples**=Number of samples (*2*) +* --**runner**=MLPerf runner +* 
--**execmode**=MLPerf exec mode +* --**output_dir**=MLPerf output directory +* --**concurrency**=MLPerf concurrency +* --**intraop**=MLPerf intra op threads +* --**interop**=MLPerf inter op threads + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "modelpath":...} +``` +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'python,app,generic,loadgen' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="python,app,generic,loadgen"``` + +#### Run this script via Docker (beta) + +`cm docker script "python app generic loadgen[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_cmc` + - Environment variables: + - *CM_CUSTOM_MODEL_CMC*: `True` + - Workflow: + * `_custom,cmc` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,ml-model,cmc + - CM script: [get-ml-model-abtf-ssd-pytorch](https://github.com/mlcommons/cm4abtf/tree/master/script/get-ml-model-abtf-ssd-pytorch) + * `_custom,huggingface` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,ml-model,huggingface + * CM names: `--adr.['hf-downloader']...` + - CM script: [get-ml-model-huggingface-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-huggingface-zoo) + * `_huggingface` + - Environment variables: + - *CM_CUSTOM_MODEL_SOURCE*: `huggingface` + - Workflow: + * `_model-stub.#` + - Environment variables: + - *CM_ML_MODEL_STUB*: `#` + - Workflow: + +
+ + + * Group "**backend**" +
+ Click here to expand this section. + + * **`_onnxruntime`** (default) + - Environment variables: + - *CM_MLPERF_BACKEND*: `onnxruntime` + - Workflow: + * `_pytorch` + - Environment variables: + - *CM_MLPERF_BACKEND*: `pytorch` + - Workflow: + +
+ + + * Group "**device**" +
+ Click here to expand this section. + + * **`_cpu`** (default) + - Environment variables: + - *CM_MLPERF_DEVICE*: `cpu` + - *CM_MLPERF_EXECUTION_PROVIDER*: `CPUExecutionProvider` + - Workflow: + * `_cuda` + - Environment variables: + - *CM_MLPERF_DEVICE*: `gpu` + - *CM_MLPERF_EXECUTION_PROVIDER*: `CUDAExecutionProvider` + - Workflow: + +
+ + + * Group "**models**" +
+ Click here to expand this section. + + * `_custom` + - Environment variables: + - *CM_MODEL*: `custom` + - Workflow: + * `_resnet50` + - Environment variables: + - *CM_MODEL*: `resnet50` + - Workflow: + * `_retinanet` + - Environment variables: + - *CM_MODEL*: `retinanet` + - Workflow: + +
+ + +#### Default variations + +`_cpu,_onnxruntime` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--concurrency=value` → `CM_MLPERF_CONCURRENCY=value` +* `--ep=value` → `CM_MLPERF_EXECUTION_PROVIDER=value` +* `--execmode=value` → `CM_MLPERF_EXEC_MODE=value` +* `--interop=value` → `CM_MLPERF_INTEROP=value` +* `--intraop=value` → `CM_MLPERF_INTRAOP=value` +* `--loadgen_duration_sec=value` → `CM_MLPERF_LOADGEN_DURATION_SEC=value` +* `--loadgen_expected_qps=value` → `CM_MLPERF_LOADGEN_EXPECTED_QPS=value` +* `--modelcfg=value` → `CM_ML_MODEL_CFG=value` +* `--modelcfgpath=value` → `CM_ML_MODEL_CFG_WITH_PATH=value` +* `--modelcodepath=value` → `CM_ML_MODEL_CODE_WITH_PATH=value` +* `--modelpath=value` → `CM_ML_MODEL_FILE_WITH_PATH=value` +* `--modelsamplepath=value` → `CM_ML_MODEL_SAMPLE_WITH_PATH=value` +* `--output_dir=value` → `CM_MLPERF_OUTPUT_DIR=value` +* `--runner=value` → `CM_MLPERF_RUNNER=value` +* `--samples=value` → `CM_MLPERF_LOADGEN_SAMPLES=value` +* `--scenario=value` → `CM_MLPERF_LOADGEN_SCENARIO=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "concurrency":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_MLPERF_EXECUTION_MODE: `parallel` +* CM_MLPERF_BACKEND: `onnxruntime` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-loadgen-generic-python/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_psutil + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.numpy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,cuda + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_DEVICE': ['gpu']}` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * get,loadgen + * CM names: `--adr.['loadgen']...` + - CM script: [get-mlperf-inference-loadgen](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-loadgen) + * get,generic-python-lib,_onnxruntime + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['onnxruntime'], 'CM_MLPERF_DEVICE': ['cpu']}` + * CM names: `--adr.['onnxruntime']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_onnxruntime_gpu + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['onnxruntime'], 'CM_MLPERF_DEVICE': ['gpu']}` + * CM names: `--adr.['onnxruntime']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_onnx + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['onnxruntime']}` + * CM names: `--adr.['onnx']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torch + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['pytorch'], 'CM_MLPERF_DEVICE': ['cpu']}` + * CM names: `--adr.['torch']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torchvision + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['pytorch'], 'CM_MLPERF_DEVICE': ['cpu']}` + * CM names: `--adr.['torchvision']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torch_cuda + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['pytorch'], 'CM_MLPERF_DEVICE': ['gpu']}` + * CM names: `--adr.['torch']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torchvision_cuda + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['pytorch'], 'CM_MLPERF_DEVICE': ['gpu']}` + * CM names: `--adr.['torchvision']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,ml-model,resnet50,_onnx + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['resnet50']}` + - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) + * get,ml-model,retinanet,_onnx,_fp32 + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet']}` + - CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) + * get,ml-model,retinanet,_onnx,_fp32 + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet']}` + - CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-loadgen-generic-python/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-loadgen-generic-python/_cm.yaml) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-loadgen-generic-python/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-loadgen-generic-python/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-loadgen-generic-python/_cm.yaml) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-loadgen-generic-python/customize.py)*** + 1. <br>
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-loadgen-generic-python/_cm.yaml) + +___ +### Script output +`cmr "python app generic loadgen [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_MLPERF_*` +#### New environment keys auto-detected from customize diff --git a/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-ctuning-cpp-tflite.md b/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-ctuning-cpp-tflite.md new file mode 100644 index 0000000000..dce3fd1b7a --- /dev/null +++ b/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-ctuning-cpp-tflite.md @@ -0,0 +1,382 @@ +Automatically generated README for this automation recipe: **app-mlperf-inference-ctuning-cpp-tflite** + +Category: **Modular MLPerf inference benchmark pipeline** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-mlperf-inference-ctuning-cpp-tflite,415904407cca404a) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-ctuning-cpp-tflite)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *app,mlperf,inference,tflite-cpp* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "app mlperf inference tflite-cpp" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=app,mlperf,inference,tflite-cpp` + +`cm run script --tags=app,mlperf,inference,tflite-cpp[,variations] [--input_flags]` + +*or* + +`cmr "app mlperf inference tflite-cpp"` + +`cmr "app mlperf inference tflite-cpp [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'app,mlperf,inference,tflite-cpp' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="app,mlperf,inference,tflite-cpp"``` + +#### Run this script via Docker (beta) + +`cm docker script "app mlperf inference tflite-cpp[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_armnn` + - Environment variables: + - *CM_MLPERF_TFLITE_USE_ARMNN*: `yes` + - *CM_TMP_LINK_LIBS*: `tensorflowlite,armnn` + - Workflow: + * `_armnn,tflite` + - Environment variables: + - *CM_MLPERF_BACKEND*: `armnn_tflite` + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - *CM_MLPERF_SUT_NAME_IMPLEMENTATION_PREFIX*: `tflite_armnn_cpp` + - *CM_TMP_LINK_LIBS*: `tensorflowlite,armnn,armnnTfLiteParser` + - *CM_TMP_SRC_FOLDER*: `armnn` + - Workflow: + +
+ + + * Group "**backend**" +
+ Click here to expand this section. + + * `_tf` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tf` + - Workflow: + * **`_tflite`** (default) + - Environment variables: + - *CM_MLPERF_BACKEND*: `tflite` + - *CM_MLPERF_BACKEND_VERSION*: `master` + - *CM_TMP_LINK_LIBS*: `tensorflowlite` + - *CM_TMP_SRC_FOLDER*: `src` + - Workflow: + +
+ + + * Group "**device**" +
+ Click here to expand this section. + + * **`_cpu`** (default) + - Environment variables: + - *CM_MLPERF_DEVICE*: `cpu` + - Workflow: + * `_gpu` + - Environment variables: + - *CM_MLPERF_DEVICE*: `gpu` + - *CM_MLPERF_DEVICE_LIB_NAMESPEC*: `cudart` + - Workflow: + +
+ + + * Group "**loadgen-scenario**" +
+ Click here to expand this section. + + * **`_singlestream`** (default) + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `SingleStream` + - Workflow: + +
+ + + * Group "**model**" +
+ Click here to expand this section. + + * `_efficientnet` + - Environment variables: + - *CM_MODEL*: `efficientnet` + - Workflow: + * `_mobilenet` + - Environment variables: + - *CM_MODEL*: `mobilenet` + - Workflow: + * **`_resnet50`** (default) + - Environment variables: + - *CM_MODEL*: `resnet50` + - Workflow: + +
+ + + * Group "**optimization-target**" +
+ Click here to expand this section. + + * `_use-neon` + - Environment variables: + - *CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX1*: `using_neon` + - *CM_MLPERF_TFLITE_USE_NEON*: `1` + - Workflow: + * `_use-opencl` + - Environment variables: + - *CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX1*: `using_opencl` + - *CM_MLPERF_TFLITE_USE_OPENCL*: `1` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * **`_fp32`** (default) + - Environment variables: + - *CM_MLPERF_MODEL_PRECISION*: `float32` + - Workflow: + * `_int8` + - Environment variables: + - *CM_DATASET_COMPRESSED*: `on` + - *CM_MLPERF_MODEL_PRECISION*: `int8` + - Workflow: + * `_uint8` + - Environment variables: + - *CM_DATASET_COMPRESSED*: `on` + - *CM_MLPERF_MODEL_PRECISION*: `uint8` + - Workflow: + +
+ + +#### Default variations + +`_cpu,_fp32,_resnet50,_singlestream,_tflite` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--compressed_dataset=value` → `CM_DATASET_COMPRESSED=value` +* `--count=value` → `CM_MLPERF_LOADGEN_QUERY_COUNT=value` +* `--mlperf_conf=value` → `CM_MLPERF_CONF=value` +* `--mode=value` → `CM_MLPERF_LOADGEN_MODE=value` +* `--output_dir=value` → `CM_MLPERF_OUTPUT_DIR=value` +* `--performance_sample_count=value` → `CM_MLPERF_LOADGEN_PERFORMANCE_SAMPLE_COUNT=value` +* `--scenario=value` → `CM_MLPERF_LOADGEN_SCENARIO=value` +* `--user_conf=value` → `CM_MLPERF_USER_CONF=value` +* `--verbose=value` → `CM_VERBOSE=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "compressed_dataset":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_DATASET_COMPRESSED: `off` +* CM_DATASET_INPUT_SQUARE_SIDE: `224` +* CM_FAST_COMPILATION: `yes` +* CM_LOADGEN_BUFFER_SIZE: `1024` +* CM_MLPERF_LOADGEN_MODE: `accuracy` +* CM_MLPERF_LOADGEN_SCENARIO: `SingleStream` +* CM_MLPERF_LOADGEN_TRIGGER_COLD_RUN: `0` +* CM_MLPERF_OUTPUT_DIR: `.` +* CM_MLPERF_SUT_NAME_IMPLEMENTATION_PREFIX: `tflite_cpp` +* CM_MLPERF_TFLITE_USE_NEON: `0` +* CM_MLPERF_TFLITE_USE_OPENCL: `0` +* CM_ML_MODEL_GIVEN_CHANNEL_MEANS: `123.68 116.78 103.94` +* CM_ML_MODEL_NORMALIZE_DATA: `0` +* CM_ML_MODEL_SUBTRACT_MEANS: `1` +* CM_VERBOSE: `0` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-ctuning-cpp-tflite/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,cuda + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_DEVICE': ['gpu']}` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * get,loadgen + * CM names: `--adr.['loadgen']...` + - CM script: [get-mlperf-inference-loadgen](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-loadgen) + * get,mlcommons,inference,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,ml-model,mobilenet,raw,_tflite + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['tflite', 'armnn_tflite'], 'CM_MODEL': ['mobilenet']}` + * CM names: `--adr.['ml-model', 'tflite-model', 'mobilenet-model']...` + - CM script: [get-ml-model-mobilenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-mobilenet) + * get,ml-model,resnet50,raw,_tflite,_no-argmax + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['tflite', 'armnn_tflite'], 'CM_MODEL': ['resnet50']}` + * CM names: `--adr.['ml-model', 'tflite-model', 'resnet50-model']...` + - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) + * get,ml-model,resnet50,raw,_tf + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['tf'], 'CM_MODEL': ['resnet50']}` + * CM names: `--adr.['ml-model', 'tflite-model', 'resnet50-model']...` + - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) + * get,ml-model,efficientnet,raw,_tflite + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['tflite', 'armnn_tflite'], 'CM_MODEL': ['efficientnet']}` + * CM names: `--adr.['ml-model', 'tflite-model', 'efficientnet-model']...` + - CM script: [get-ml-model-efficientnet-lite](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-efficientnet-lite) + * get,tensorflow,lib,_tflite + - CM script: [install-tensorflow-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-tensorflow-from-src) + * get,lib,armnn + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_TFLITE_USE_ARMNN': ['yes']}` + * CM names: `--adr.['armnn', 'lib-armnn']...` + - CM script: [get-lib-armnn](https://github.com/mlcommons/cm4mlops/tree/master/script/get-lib-armnn) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-ctuning-cpp-tflite/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-ctuning-cpp-tflite/_cm.json)*** + * generate,user-conf,mlperf,inference + * CM names: `--adr.['user-conf-generator']...` + - CM script: [generate-mlperf-inference-user-conf](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-inference-user-conf) + * get,dataset,preprocessed,imagenet,_for.resnet50,_rgb32,_NHWC + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_SKIP_RUN': ['no'], 'CM_MODEL': ['resnet50']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_DATASET_COMPRESSED': ['on']}` + * CM names: `--adr.['imagenet-preprocessed', 'preprocessed-dataset']...` + - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) + * get,dataset,preprocessed,imagenet,_for.mobilenet,_rgb32,_NHWC + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_SKIP_RUN': ['no'], 'CM_MODEL': ['mobilenet', 'efficientnet']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_DATASET_COMPRESSED': ['on']}` + * CM names: `--adr.['imagenet-preprocessed', 'preprocessed-dataset']...` + - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) + * get,dataset,preprocessed,imagenet,_for.mobilenet,_rgb8,_NHWC + * Enable this dependency only if all ENV vars are set:
+`{'CM_DATASET_COMPRESSED': ['on'], 'CM_MLPERF_SKIP_RUN': ['no'], 'CM_MODEL': ['mobilenet', 'efficientnet']}` + * CM names: `--adr.['imagenet-preprocessed', 'preprocessed-dataset']...` + - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) + * get,dataset,preprocessed,imagenet,_for.resnet50,_rgb8,_NHWC + * Enable this dependency only if all ENV vars are set:
+`{'CM_DATASET_COMPRESSED': ['on'], 'CM_MLPERF_SKIP_RUN': ['no'], 'CM_MODEL': ['resnet50']}` + * CM names: `--adr.['imagenet-preprocessed', 'preprocessed-dataset']...` + - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-ctuning-cpp-tflite/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-ctuning-cpp-tflite/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-ctuning-cpp-tflite/_cm.json)*** + * compile,program + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_SKIP_RUN': ['yes']}` + * CM names: `--adr.['compiler-program']...` + - CM script: [compile-program](https://github.com/mlcommons/cm4mlops/tree/master/script/compile-program) + * benchmark-mlperf + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_SKIP_RUN': ['yes']}` + * CM names: `--adr.['mlperf-runner']...` + - CM script: [benchmark-program-mlperf](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program-mlperf) + * save,mlperf,inference,state + * CM names: `--adr.['save-mlperf-inference-state']...` + - CM script: [save-mlperf-inference-implementation-state](https://github.com/mlcommons/cm4mlops/tree/master/script/save-mlperf-inference-implementation-state) + +___ +### Script output +`cmr "app mlperf inference tflite-cpp [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_HW_NAME` +* `CM_MLPERF_*` +* `CM_ML_MODEL_*` +#### New environment keys auto-detected from customize + +* `CM_MLPERF_CONF` +* `CM_MLPERF_DEVICE` +* `CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX2` +* `CM_MLPERF_USER_CONF` \ No newline at end of file diff --git a/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-mlcommons-cpp.md b/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-mlcommons-cpp.md new file mode 100644 index 0000000000..35b59a51b0 --- /dev/null +++ b/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-mlcommons-cpp.md @@ -0,0 +1,336 @@ +Automatically generated README for this automation recipe: **app-mlperf-inference-mlcommons-cpp** + +Category: **Modular MLPerf inference benchmark pipeline** + +License: **Apache 2.0** + +Developers: [Thomas Zhu](https://www.linkedin.com/in/hanwen-zhu-483614189), [Arjun Suresh](https://www.linkedin.com/in/arjunsuresh), [Grigori Fursin](https://cKnowledge.org/gfursin) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-mlperf-inference-mlcommons-cpp,bf62405e6c7a44bf) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-cpp)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *app,mlcommons,mlperf,inference,cpp* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "app mlcommons mlperf inference cpp" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=app,mlcommons,mlperf,inference,cpp` + +`cm run script --tags=app,mlcommons,mlperf,inference,cpp[,variations] [--input_flags]` + +*or* + +`cmr "app mlcommons mlperf inference cpp"` + +`cmr "app mlcommons mlperf inference cpp [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + + ```python + + import cmind + + r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'app,mlcommons,mlperf,inference,cpp', + 'out':'con', + ... + (other input keys for this script) + ... + }) + + if r['return']>0: + print (r['error']) + + ``` + 
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="app,mlcommons,mlperf,inference,cpp"``` + +#### Run this script via Docker (beta) + +`cm docker script "app mlcommons mlperf inference cpp[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_multistream,resnet50` + - Workflow: + * `_multistream,retinanet` + - Workflow: + * `_offline,resnet50` + - Workflow: + * `_resnet50,multistream` + - Workflow: + * `_resnet50,offline` + - Workflow: + * `_resnet50,server` + - Workflow: + +
+ + + * Group "**batch-size**" +
+ Click here to expand this section. + + * `_batch-size.#` + - Environment variables: + - *CM_MLPERF_LOADGEN_MAX_BATCHSIZE*: `#` + - Workflow: + +
+ + + * Group "**device**" +
+ Click here to expand this section. + + * **`_cpu`** (default) + - Environment variables: + - *CM_MLPERF_DEVICE*: `cpu` + - Workflow: + * `_cuda` + - Environment variables: + - *CM_MLPERF_DEVICE*: `gpu` + - *CM_MLPERF_DEVICE_LIB_NAMESPEC*: `cudart` + - Workflow: + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * **`_onnxruntime`** (default) + - Environment variables: + - *CM_MLPERF_BACKEND*: `onnxruntime` + - *CM_MLPERF_BACKEND_LIB_NAMESPEC*: `onnxruntime` + - Workflow: + * `_pytorch` + - Environment variables: + - *CM_MLPERF_BACKEND*: `pytorch` + - Workflow: + * `_tf` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tf` + - Workflow: + * `_tflite` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tflite` + - Workflow: + * `_tvm-onnx` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tvm-onnx` + - Workflow: + +
+ + + * Group "**loadgen-scenario**" +
+ Click here to expand this section. + + * `_multistream` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `MultiStream` + - Workflow: + * **`_offline`** (default) + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `Offline` + - Workflow: + * `_server` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `Server` + - Workflow: + * `_singlestream` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `SingleStream` + - *CM_MLPERF_LOADGEN_MAX_BATCHSIZE*: `1` + - Workflow: + +
+ + + * Group "**model**" +
+ Click here to expand this section. + + * **`_resnet50`** (default) + - Environment variables: + - *CM_MODEL*: `resnet50` + - Workflow: + * `_retinanet` + - Environment variables: + - *CM_MODEL*: `retinanet` + - Workflow: + +
+ + +#### Default variations + +`_cpu,_offline,_onnxruntime,_resnet50` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--count=value` → `CM_MLPERF_LOADGEN_QUERY_COUNT=value` +* `--max_batchsize=value` → `CM_MLPERF_LOADGEN_MAX_BATCHSIZE=value` +* `--mlperf_conf=value` → `CM_MLPERF_CONF=value` +* `--mode=value` → `CM_MLPERF_LOADGEN_MODE=value` +* `--output_dir=value` → `CM_MLPERF_OUTPUT_DIR=value` +* `--performance_sample_count=value` → `CM_MLPERF_LOADGEN_PERFORMANCE_SAMPLE_COUNT=value` +* `--scenario=value` → `CM_MLPERF_LOADGEN_SCENARIO=value` +* `--user_conf=value` → `CM_MLPERF_USER_CONF=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "count":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_BATCH_COUNT: `1` +* CM_BATCH_SIZE: `1` +* CM_FAST_COMPILATION: `yes` +* CM_MLPERF_SUT_NAME_IMPLEMENTATION_PREFIX: `cpp` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-cpp/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,cuda,_cudnn + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_DEVICE': ['gpu']}` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * get,loadgen + * CM names: `--adr.['loadgen']...` + - CM script: [get-mlperf-inference-loadgen](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-loadgen) + * get,mlcommons,inference,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,lib,onnxruntime,lang-cpp,_cpu + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['onnxruntime'], 'CM_MLPERF_DEVICE': ['cpu']}` + - CM script: [get-onnxruntime-prebuilt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-onnxruntime-prebuilt) + * get,lib,onnxruntime,lang-cpp,_cuda + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['onnxruntime'], 'CM_MLPERF_DEVICE': ['gpu']}` + - CM script: [get-onnxruntime-prebuilt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-onnxruntime-prebuilt) + * get,dataset,preprocessed,imagenet,_NCHW + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['resnet50']}` + * CM names: `--adr.['imagenet-preprocessed']...` + - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) + * get,ml-model,raw,resnet50,_onnx + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['resnet50']}` + - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) + * get,dataset,preprocessed,openimages,_validation,_NCHW + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet']}` + * CM names: `--adr.['openimages-preprocessed']...` + - CM script: [get-preprocessed-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-openimages) + * get,ml-model,retinanet,_onnx,_fp32 + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet']}` + - CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) + * generate,user-conf,mlperf,inference + * CM names: `--adr.['user-conf-generator']...` + - CM script: [generate-mlperf-inference-user-conf](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-inference-user-conf) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-cpp/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-cpp/_cm.yaml) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-cpp/_cm.yaml) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-cpp/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-cpp/_cm.yaml)*** + * compile,cpp-program + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_SKIP_RUN': ['yes']}` + * CM names: `--adr.['compile-program']...` + - CM script: [compile-program](https://github.com/mlcommons/cm4mlops/tree/master/script/compile-program) + * benchmark-mlperf + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_SKIP_RUN': ['yes']}` + * CM names: `--adr.['mlperf-runner']...` + - CM script: [benchmark-program-mlperf](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program-mlperf) + * save,mlperf,inference,state + * CM names: `--adr.['save-mlperf-inference-state']...` + - CM script: [save-mlperf-inference-implementation-state](https://github.com/mlcommons/cm4mlops/tree/master/script/save-mlperf-inference-implementation-state) + +___ +### Script output +`cmr "app mlcommons mlperf inference cpp [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_DATASET_*` +* `CM_HW_NAME` +* `CM_MLPERF_*` +* `CM_ML_MODEL_*` +#### New environment keys auto-detected from customize + +* `CM_DATASET_LIST` +* `CM_MLPERF_CONF` +* `CM_MLPERF_DEVICE` +* `CM_MLPERF_USER_CONF` \ No newline at end of file diff --git a/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-mlcommons-python.md b/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-mlcommons-python.md new file mode 100644 index 0000000000..d8e825fc87 --- /dev/null +++ b/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-mlcommons-python.md @@ -0,0 +1,944 @@ +Automatically generated README for this automation recipe: **app-mlperf-inference-mlcommons-python** + +Category: **Modular MLPerf inference benchmark pipeline** + +License: **Apache 2.0** + +Developers: [Arjun Suresh](https://www.linkedin.com/in/arjunsuresh), [Thomas Zhu](https://www.linkedin.com/in/hanwen-zhu-483614189), [Grigori Fursin](https://cKnowledge.org/gfursin) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-mlperf-inference-mlcommons-python,ff149e9781fc4b65) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- + +This portable CM script is being developed by the [MLCommons taskforce on automation and 
reproducibility](https://github.com/mlcommons/ck/blob/master/docs/mlperf-education-workgroup.md) +to modularize the *python reference implementations* of the [MLPerf inference benchmark](https://github.com/mlcommons/inference) +using the [MLCommons CM automation meta-framework](https://github.com/mlcommons/ck). +The goal is to make it easier to run, optimize and reproduce MLPerf benchmarks +across diverse platforms with continuously changing software and hardware. + +See the current coverage of different models, devices and backends [here](README-extra.md#current-coverage). + + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-python)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *app,vision,language,mlcommons,mlperf,inference,reference,ref* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "app vision language mlcommons mlperf inference reference ref" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=app,vision,language,mlcommons,mlperf,inference,reference,ref` + +`cm run script --tags=app,vision,language,mlcommons,mlperf,inference,reference,ref[,variations] [--input_flags]` + +*or* + +`cmr "app vision language mlcommons mlperf inference reference ref"` + +`cmr "app vision language mlcommons mlperf inference reference ref [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + + ```python + + import cmind + + r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'app,vision,language,mlcommons,mlperf,inference,reference,ref', + 'out':'con', + ... + (other input keys for this script) + ... + }) + + if r['return']>0: + print (r['error']) + + ``` + 
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="app,vision,language,mlcommons,mlperf,inference,reference,ref"``` + +#### Run this script via Docker (beta) + +`cm docker script "app vision language mlcommons mlperf inference reference ref[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *Internal group (variations should not be selected manually)* +
+ Click here to expand this section. + + * `_gptj_` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_torch + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.datasets + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.attrs + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.accelerate + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_llama2-70b_` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_package.transformers + * CM names: `--adr.['transformers']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.datasets + * CM names: `--adr.['datasets']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.sentencepiece + * CM names: `--adr.['sentencepiece']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.protobuf + * CM names: `--adr.['protobuf']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.accelerate + * CM names: `--adr.['accelerate']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.absl-py + * CM names: `--adr.['absl-py']...` + - CM script: 
[get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.evaluate + * CM names: `--adr.['evaluate']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.nltk + * CM names: `--adr.['nltk']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.rouge-score + * CM names: `--adr.['rouge-score']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + +
+ + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_3d-unet` + - Environment variables: + - *CM_TMP_IGNORE_MLPERF_QUERY_COUNT*: `True` + - *CM_MLPERF_MODEL_SKIP_BATCHING*: `True` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_package.nibabel + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_beam_size.#` + - Environment variables: + - *GPTJ_BEAM_SIZE*: `#` + - Workflow: + * `_bert` + - Environment variables: + - *CM_MLPERF_MODEL_SKIP_BATCHING*: `True` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_package.pydantic + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_tokenization + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_six + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.absl-py + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_protobuf + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['tf', 'tflite']}` + * CM names: `--adr.['protobuf']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_boto3 + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['pytorch']}` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torch + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_DEVICE': ['gpu']}` + * CM names: `--adr.['ml-engine-pytorch', 'pytorch']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_dlrm` + - Environment variables: + - *CM_MLPERF_MODEL_SKIP_BATCHING*: `True` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,dlrm,src + * CM names: `--adr.['dlrm-src']...` + - CM script: [get-dlrm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dlrm) + * get,generic-python-lib,_mlperf_logging + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_opencv-python + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_tensorboard + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_protobuf + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_scikit-learn + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_tqdm + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_onnx + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_numpy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.torchrec + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.pyre-extensions + - CM 
script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.torchsnapshot + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_llama2-70b_,cuda` + - Workflow: + * `_multistream` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `MultiStream` + - Workflow: + * `_offline` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `Offline` + - Workflow: + * `_onnxruntime,cpu` + - Environment variables: + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - Workflow: + * `_onnxruntime,cuda` + - Environment variables: + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - *ONNXRUNTIME_PREFERRED_EXECUTION_PROVIDER*: `CUDAExecutionProvider` + - Workflow: + * `_onnxruntime,rocm` + - Environment variables: + - *ONNXRUNTIME_PREFERRED_EXECUTION_PROVIDER*: `ROCMExecutionProvider` + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - Workflow: + * `_pytorch,rocm` + - Workflow: + * `_r2.1_default` + - Environment variables: + - *CM_RERUN*: `yes` + - *CM_SKIP_SYS_UTILS*: `yes` + - *CM_TEST_QUERY_COUNT*: `100` + - Workflow: + * `_server` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `Server` + - Workflow: + * `_singlestream` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `SingleStream` + - Workflow: + * `_tf,rocm` + - Environment variables: + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - Workflow: + * `_tpu,tflite` + - Workflow: + +
+ + + * Group "**batch-size**" +
+ Click here to expand this section. + + * `_batch_size.#` + - Environment variables: + - *CM_MLPERF_LOADGEN_MAX_BATCHSIZE*: `#` + - Workflow: + +
+ + + * Group "**device**" +
+ Click here to expand this section. + + * **`_cpu`** (default) + - Environment variables: + - *CM_MLPERF_DEVICE*: `cpu` + - *CUDA_VISIBLE_DEVICES*: `` + - *USE_CUDA*: `False` + - *USE_GPU*: `False` + - Workflow: + * `_cuda` + - Environment variables: + - *CM_MLPERF_DEVICE*: `gpu` + - *USE_CUDA*: `True` + - *USE_GPU*: `True` + - Workflow: + * `_rocm` + - Environment variables: + - *CM_MLPERF_DEVICE*: `rocm` + - *USE_GPU*: `True` + - Workflow: + * `_tpu` + - Environment variables: + - *CM_MLPERF_DEVICE*: `tpu` + - Workflow: + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * `_deepsparse` + - Environment variables: + - *CM_MLPERF_BACKEND*: `deepsparse` + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_deepsparse + * Skip this dependency only if all ENV vars are set:
+`{'CM_HOST_PLATFORM_FLAVOR': ['aarch64']}` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.deepsparse-nightly + * Enable this dependency only if all ENV vars are set:
+`{'CM_HOST_PLATFORM_FLAVOR': ['aarch64']}` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_ncnn` + - Environment variables: + - *CM_MLPERF_BACKEND*: `ncnn` + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - *CM_MLPERF_VISION_DATASET_OPTION*: `imagenet_pytorch` + - Workflow: + * **`_onnxruntime`** (default) + - Environment variables: + - *CM_MLPERF_BACKEND*: `onnxruntime` + - Workflow: + * `_pytorch` + - Environment variables: + - *CM_MLPERF_BACKEND*: `pytorch` + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - Workflow: + * `_ray` + - Environment variables: + - *CM_MLPERF_BACKEND*: `ray` + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - Workflow: + * `_tf` + - Aliases: `_tensorflow` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tf` + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - Workflow: + * `_tflite` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tflite` + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - *CM_MLPERF_VISION_DATASET_OPTION*: `imagenet_tflite_tpu` + - Workflow: + * `_tvm-onnx` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tvm-onnx` + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_onnx + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,tvm + * CM names: `--adr.['tvm']...` + - CM script: [get-tvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tvm) + * get,tvm-model,_onnx + * CM names: `--adr.['tvm-model']...` + - CM script: [get-tvm-model](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tvm-model) + * `_tvm-pytorch` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tvm-pytorch` + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - *CM_PREPROCESS_PYTORCH*: `yes` + - *MLPERF_TVM_TORCH_QUANTIZED_ENGINE*: `qnnpack` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,generic-python-lib,_torch + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,tvm + * CM names: `--adr.['tvm']...` + - CM script: [get-tvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tvm) + * get,tvm-model,_pytorch + * CM names: `--adr.['tvm-model']...` + - CM script: [get-tvm-model](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tvm-model) + * `_tvm-tflite` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tvm-tflite` + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_tflite + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,tvm + * CM names: `--adr.['tvm']...` + - CM script: [get-tvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tvm) + * get,tvm-model,_tflite + * CM names: `--adr.['tvm-model']...` + - CM script: [get-tvm-model](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tvm-model) + +
+ + + * Group "**implementation**" +
+ Click here to expand this section. + + * **`_python`** (default) + - Environment variables: + - *CM_MLPERF_PYTHON*: `yes` + - *CM_MLPERF_IMPLEMENTATION*: `reference` + - Workflow: + +
+ + + * Group "**models**" +
+ Click here to expand this section. + + * `_3d-unet-99` + - Environment variables: + - *CM_MODEL*: `3d-unet-99` + - Workflow: + * `_3d-unet-99.9` + - Environment variables: + - *CM_MODEL*: `3d-unet-99.9` + - Workflow: + * `_bert-99` + - Environment variables: + - *CM_MODEL*: `bert-99` + - Workflow: + * `_bert-99.9` + - Environment variables: + - *CM_MODEL*: `bert-99.9` + - Workflow: + * `_dlrm-99` + - Environment variables: + - *CM_MODEL*: `dlrm-99` + - Workflow: + * `_dlrm-99.9` + - Environment variables: + - *CM_MODEL*: `dlrm-99.9` + - Workflow: + * `_gptj-99` + - Environment variables: + - *CM_MODEL*: `gptj-99` + - Workflow: + * `_gptj-99.9` + - Environment variables: + - *CM_MODEL*: `gptj-99.9` + - Workflow: + * `_llama2-70b-99` + - Environment variables: + - *CM_MODEL*: `llama2-70b-99` + - Workflow: + * `_llama2-70b-99.9` + - Environment variables: + - *CM_MODEL*: `llama2-70b-99.9` + - Workflow: + * **`_resnet50`** (default) + - Environment variables: + - *CM_MODEL*: `resnet50` + - *CM_MLPERF_USE_MLCOMMONS_RUN_SCRIPT*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_opencv-python + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_numpy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_pycocotools + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Read "prehook_deps" on other CM scripts*** + * get,generic-python-lib,_protobuf + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['tf', 'tflite']}` + * CM names: `--adr.['protobuf']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_retinanet` + - Environment variables: + - *CM_MODEL*: `retinanet` + - *CM_MLPERF_USE_MLCOMMONS_RUN_SCRIPT*: `yes` + - *CM_MLPERF_LOADGEN_MAX_BATCHSIZE*: `1` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_opencv-python + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_numpy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_pycocotools + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_rnnt` + - Environment variables: + - *CM_MODEL*: `rnnt` + - *CM_MLPERF_MODEL_SKIP_BATCHING*: `True` + - *CM_TMP_IGNORE_MLPERF_QUERY_COUNT*: `True` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,generic-python-lib,_package.pydantic + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_librosa + * CM names: `--adr.['librosa']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_inflect + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_unidecode + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_toml + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_sdxl` + - Environment variables: + - *CM_MODEL*: `stable-diffusion-xl` + - *CM_NUM_THREADS*: `1` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_package.diffusers + * CM names: `--adr.['diffusers']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.transformers + * CM names: `--adr.['transformers']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.accelerate + * CM names: `--adr.['accelerate']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.torchmetrics + * CM names: `--adr.['torchmetrics']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.torch-fidelity + * CM names: `--adr.['torch-fidelity']...` + - CM script: 
[get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.open_clip_torch + * CM names: `--adr.['open-clip']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.opencv-python + * CM names: `--adr.['opencv-python']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.scipy + * CM names: `--adr.['scipy']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + +
+ + + * Group "**network**" +
+ Click here to expand this section. + + * `_network-lon` + - Environment variables: + - *CM_NETWORK_LOADGEN*: `lon` + - *CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX1*: `network_loadgen` + - Workflow: + * `_network-sut` + - Environment variables: + - *CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX1*: `network_sut` + - *CM_NETWORK_LOADGEN*: `sut` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * `_bfloat16` + - Environment variables: + - *CM_MLPERF_QUANTIZATION*: `False` + - *CM_MLPERF_MODEL_PRECISION*: `bfloat16` + - Workflow: + * `_float16` + - Environment variables: + - *CM_MLPERF_QUANTIZATION*: `False` + - *CM_MLPERF_MODEL_PRECISION*: `float16` + - Workflow: + * **`_fp32`** (default) + - Environment variables: + - *CM_MLPERF_QUANTIZATION*: `False` + - *CM_MLPERF_MODEL_PRECISION*: `float32` + - Workflow: + * `_int8` + - Aliases: `_quantized` + - Environment variables: + - *CM_MLPERF_QUANTIZATION*: `True` + - *CM_MLPERF_MODEL_PRECISION*: `int8` + - Workflow: + +
+ + +#### Default variations + +`_cpu,_fp32,_onnxruntime,_python,_resnet50` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--clean=value` → `CM_MLPERF_CLEAN_SUBMISSION_DIR=value` +* `--count=value` → `CM_MLPERF_LOADGEN_QUERY_COUNT=value` +* `--dataset=value` → `CM_MLPERF_VISION_DATASET_OPTION=value` +* `--dataset_args=value` → `CM_MLPERF_EXTRA_DATASET_ARGS=value` +* `--docker=value` → `CM_RUN_DOCKER_CONTAINER=value` +* `--hw_name=value` → `CM_HW_NAME=value` +* `--imagenet_path=value` → `IMAGENET_PATH=value` +* `--max_amps=value` → `CM_MLPERF_POWER_MAX_AMPS=value` +* `--max_batchsize=value` → `CM_MLPERF_LOADGEN_MAX_BATCHSIZE=value` +* `--max_volts=value` → `CM_MLPERF_POWER_MAX_VOLTS=value` +* `--mode=value` → `CM_MLPERF_LOADGEN_MODE=value` +* `--model=value` → `CM_MLPERF_CUSTOM_MODEL_PATH=value` +* `--multistream_target_latency=value` → `CM_MLPERF_LOADGEN_MULTISTREAM_TARGET_LATENCY=value` +* `--network=value` → `CM_NETWORK_LOADGEN=value` +* `--ntp_server=value` → `CM_MLPERF_POWER_NTP_SERVER=value` +* `--num_threads=value` → `CM_NUM_THREADS=value` +* `--offline_target_qps=value` → `CM_MLPERF_LOADGEN_OFFLINE_TARGET_QPS=value` +* `--output_dir=value` → `OUTPUT_BASE_DIR=value` +* `--power=value` → `CM_MLPERF_POWER=value` +* `--power_server=value` → `CM_MLPERF_POWER_SERVER_ADDRESS=value` +* `--regenerate_files=value` → `CM_REGENERATE_MEASURE_FILES=value` +* `--rerun=value` → `CM_RERUN=value` +* `--scenario=value` → `CM_MLPERF_LOADGEN_SCENARIO=value` +* `--server_target_qps=value` → `CM_MLPERF_LOADGEN_SERVER_TARGET_QPS=value` +* `--singlestream_target_latency=value` → `CM_MLPERF_LOADGEN_SINGLESTREAM_TARGET_LATENCY=value` +* `--sut_servers=value` → `CM_NETWORK_LOADGEN_SUT_SERVERS=value` +* `--target_latency=value` → `CM_MLPERF_LOADGEN_TARGET_LATENCY=value` +* `--target_qps=value` → `CM_MLPERF_LOADGEN_TARGET_QPS=value` +* `--test_query_count=value` → `CM_TEST_QUERY_COUNT=value` +* `--threads=value` → `CM_NUM_THREADS=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "clean":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_MLPERF_LOADGEN_MODE: `accuracy` +* CM_MLPERF_LOADGEN_SCENARIO: `Offline` +* CM_OUTPUT_FOLDER_NAME: `test_results` +* CM_MLPERF_RUN_STYLE: `test` +* CM_TEST_QUERY_COUNT: `10` +* CM_MLPERF_QUANTIZATION: `False` +* CM_MLPERF_SUT_NAME_IMPLEMENTATION_PREFIX: `reference` +* CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX: `` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-python/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,cuda,_cudnn + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_DEVICE': ['gpu'], 'CM_MLPERF_BACKEND': ['onnxruntime', 'tf', 'tflite', 'pytorch']}` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * get,nvidia,tensorrt + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['tensorrt']}` + - CM script: [get-tensorrt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tensorrt) + * get,generic-python-lib,_onnxruntime + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['onnxruntime', 'tvm-onnx'], 'CM_MLPERF_DEVICE': ['cpu', 'rocm']}` + * CM names: `--adr.['ml-engine-onnxruntime', 'onnxruntime']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_onnxruntime_gpu + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['onnxruntime', 'tvm-onnx'], 'CM_MLPERF_DEVICE': ['gpu']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['3d-unet-99', '3d-unet-99.9']}` + * CM names: `--adr.['ml-engine-onnxruntime-cuda']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_onnxruntime + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['onnxruntime'], 'CM_MLPERF_DEVICE': ['gpu'], 'CM_MODEL': ['3d-unet-99', '3d-unet-99.9', 'resnet50']}` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_onnxruntime_gpu + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['onnxruntime'], 'CM_MLPERF_DEVICE': ['gpu'], 'CM_MODEL': ['3d-unet-99', '3d-unet-99.9', 'resnet50']}` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torch + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['pytorch', 'tvm-pytorch'], 'CM_MLPERF_DEVICE': ['cpu', 'rocm']}` + * CM names: `--adr.['ml-engine-pytorch', 'pytorch']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torch_cuda + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['pytorch', 'tvm-pytorch', 'ray'], 'CM_MLPERF_DEVICE': ['gpu']}` + * CM names: `--adr.['ml-engine-pytorch', 'pytorch']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torchvision + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['pytorch', 'tvm-pytorch'], 'CM_MLPERF_DEVICE': ['cpu']}` + * CM names: `--adr.['ml-engine-torchvision']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torchvision_cuda + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['pytorch', 'tvm-pytorch', 'ray'], 'CM_MLPERF_DEVICE': ['gpu']}` + * CM names: `--adr.['ml-engine-torchvision']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_tensorrt + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['ray']}` + * CM names: `--adr.['ml-engine-tensorrt']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torch_tensorrt + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['ray']}` + * CM names: `--adr.['ml-engine-torch_tensorrt']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_ray + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['ray']}` + * CM names: `--adr.['ray']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_async_timeout + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['ray']}` + * CM names: `--adr.['async_timeout']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_transformers + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['bert-99', 'bert-99.9', 'gptj-99', 'gptj-99.9']}` + * CM names: `--adr.['ml-engine-transformers']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_tensorflow + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['tf', 'tflite']}` + * CM names: `--adr.['ml-engine-tensorflow', 'tensorflow']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.ncnn + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['ncnn']}` + * CM names: `--adr.['ml-engine-ncnn']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,ml-model,neural-magic,zoo + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_NEURALMAGIC_MODEL_ZOO_STUB': ['on']}` + * CM names: `--adr.['custom-ml-model']...` + - CM script: [get-ml-model-neuralmagic-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-neuralmagic-zoo) + * get,ml-model,image-classification,resnet50 + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['resnet50']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_CUSTOM_MODEL_PATH': ['on']}` + * CM names: `--adr.['ml-model', 'resnet50-model']...` + - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) + * get,ml-model,object-detection,retinanet + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet']}` + * CM names: `--adr.['ml-model', 'retinanet-model']...` + - CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) + * get,ml-model,large-language-model,gptj + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['gptj-99', 'gptj-99.9']}` + * CM names: `--adr.['ml-model', 'gptj-model', 'gpt-j-model']...` + - CM script: [get-ml-model-gptj](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-gptj) + * get,ml-model,object-detection,resnext50,fp32,_pytorch-weights + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['pytorch'], 'CM_MLPERF_IMPLEMENTATION': ['nvidia'], 'CM_MODEL': ['retinanet']}` + * CM names: `--adr.['ml-model', 'retinanet-model']...` + - *Warning: no scripts found* + * get,ml-model,language-processing,bert-large + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['bert-99', 'bert-99.9']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_CUSTOM_MODEL_PATH': ['on']}` + * CM names: `--adr.['ml-model', 'bert-model']...` + - CM script: [get-ml-model-bert-large-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-large-squad) + * get,ml-model,stable-diffusion,text-to-image,sdxl + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['stable-diffusion-xl']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_CUSTOM_MODEL_PATH': ['on']}` + * CM names: `--adr.['ml-model', 'sdxl-model']...` + - CM script: [get-ml-model-stable-diffusion](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-stable-diffusion) + * get,ml-model,llama2 + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['llama2-70b-99', 'llama2-70b-99.9']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_CUSTOM_MODEL_PATH': ['on']}` + * CM names: `--adr.['ml-model', 'llama2-model']...` + - CM script: [get-ml-model-llama2](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-llama2) + * get,ml-model,medical-imaging,3d-unet + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['3d-unet-99', '3d-unet-99.9']}` + * CM names: `--adr.['ml-model', '3d-unet-model']...` + - CM script: [get-ml-model-3d-unet-kits19](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-3d-unet-kits19) + * get,ml-model,speech-recognition,rnnt + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['rnnt']}` + * CM names: `--adr.['ml-model', 'rnnt-model']...` + - CM script: [get-ml-model-rnnt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-rnnt) + * get,ml-model,recommendation,dlrm + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['dlrm-99', 'dlrm-99.9']}` + * CM names: `--adr.['ml-model', 'dlrm-model']...` + - CM script: [get-ml-model-dlrm-terabyte](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-dlrm-terabyte) + * get,dataset,image-classification,imagenet,preprocessed + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['resnet50']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_VISION_DATASET_OPTION': [True]}` + * CM names: `--adr.['imagenet-preprocessed']...` + - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) + * get,dataset,image-classification,imagenet,preprocessed,_pytorch + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['resnet50'], 'CM_MLPERF_VISION_DATASET_OPTION': ['imagenet_pytorch']}` + * CM names: `--adr.['imagenet-preprocessed']...` + - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) + * get,dataset-aux,image-classification,imagenet-aux + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['resnet50']}` + - CM script: [get-dataset-imagenet-aux](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-aux) + * get,dataset,object-detection,open-images,openimages,preprocessed,_validation + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet']}` + * CM names: `--adr.['openimages-preprocessed']...` + - CM script: [get-preprocessed-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-openimages) + * get,dataset,cnndm,_validation + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['gptj-99', 'gptj-99.9']}` + * CM names: `--adr.['cnndm-preprocessed']...` + - CM script: [get-dataset-cnndm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-cnndm) + * get,dataset,squad,original + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['bert-99', 'bert-99.9']}` + * CM names: `--adr.['cnndm-preprocessed']...` + - CM script: [get-dataset-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad) + * get,dataset-aux,squad-vocab + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['bert-99', 'bert-99.9']}` + - CM script: [get-dataset-squad-vocab](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad-vocab) + * get,dataset,coco2014,_validation + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['stable-diffusion-xl']}` + * CM names: `--adr.['coco2014-preprocessed']...` + - CM script: [get-dataset-coco2014](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-coco2014) + * get,preprocessed,dataset,openorca,_validation + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['llama2-70b-99', 'llama2-70b-99.9']}` + * CM names: `--adr.['openorca-preprocessed']...` + - CM script: [get-preprocessed-dataset-openorca](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-openorca) + * get,dataset,kits19,preprocessed + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['3d-unet-99', '3d-unet-99.9']}` + * CM names: `--adr.['kits19-preprocessed']...` + - CM script: [get-preprocessed-dataset-kits19](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-kits19) + * get,dataset,librispeech,preprocessed + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['rnnt']}` + * CM names: `--adr.['librispeech-preprocessed']...` + - CM script: [get-preprocessed-dataset-librispeech](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-librispeech) + * get,dataset,criteo,preprocessed + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['dlrm-99', 'dlrm-99.9']}` + * CM names: `--adr.['criteo-preprocessed']...` + - CM script: [get-preprocessed-dataset-criteo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-criteo) + * generate,user-conf,mlperf,inference + * CM names: `--adr.['user-conf-generator']...` + - CM script: [generate-mlperf-inference-user-conf](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-inference-user-conf) + * get,loadgen + * CM names: `--adr.['loadgen', 'mlperf-inference-loadgen']...` + - CM script: [get-mlperf-inference-loadgen](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-loadgen) + * get,mlcommons,inference,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,mlcommons,inference,src + * CM names: `--adr.['mlperf-implementation']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,generic-python-lib,_package.psutil + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-python/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-python/_cm.yaml)*** + * remote,run,cmds + * Enable this dependency only if all ENV vars are set:
+`{'CM_ASSH_RUN_COMMANDS': ['on']}` + * CM names: `--adr.['remote-run-cmds']...` + - CM script: [remote-run-commands](https://github.com/mlcommons/cm4mlops/tree/master/script/remote-run-commands) + 1. ***Run native script if exists*** + 1. ***Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-python/_cm.yaml)*** + * benchmark-mlperf + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_SKIP_RUN': ['on']}` + * CM names: `--adr.['mlperf-runner']...` + - CM script: [benchmark-program-mlperf](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program-mlperf) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-python/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-python/_cm.yaml)*** + * save,mlperf,inference,state + * CM names: `--adr.['save-mlperf-inference-state']...` + - CM script: [save-mlperf-inference-implementation-state](https://github.com/mlcommons/cm4mlops/tree/master/script/save-mlperf-inference-implementation-state) + +___ +### Script output +`cmr "app vision language mlcommons mlperf inference reference ref [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_DATASET_*` +* `CM_HW_NAME` +* `CM_MAX_EXAMPLES` +* `CM_MLPERF_*` +* `CM_ML_MODEL_*` +#### New environment keys auto-detected from customize + +* `CM_MLPERF_BACKEND` +* `CM_MLPERF_CONF` +* `CM_MLPERF_DEVICE` +* `CM_MLPERF_LOADGEN_EXTRA_OPTIONS` +* `CM_MLPERF_LOADGEN_MODE` +* `CM_MLPERF_LOADGEN_QPS_OPT` +* `CM_MLPERF_LOADGEN_SCENARIO` +* `CM_MLPERF_OUTPUT_DIR` +* `CM_MLPERF_RUN_CMD` +* `CM_ML_MODEL_FILE_WITH_PATH` \ No newline at end of file diff --git a/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference.md b/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference.md new file mode 100644 index 0000000000..819393fee3 --- /dev/null +++ b/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference.md @@ -0,0 +1,805 @@ +Automatically generated README for this automation recipe: **app-mlperf-inference** + +Category: **Modular MLPerf inference benchmark pipeline** + +License: **Apache 2.0** + +Developers: [Arjun Suresh](https://www.linkedin.com/in/arjunsuresh), [Thomas 
Zhu](https://www.linkedin.com/in/hanwen-zhu-483614189), [Grigori Fursin](https://cKnowledge.org/gfursin) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-mlperf-inference,d775cac873ee4231) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- + +This CM script provides a unified interface to prepare and run a modular version of the [MLPerf inference benchmark](https://arxiv.org/abs/1911.02549) +across diverse ML models, data sets, frameworks, libraries, run-time systems and platforms +using the [cross-platform automation meta-framework (MLCommons CM)](https://github.com/mlcommons/ck). + +It is assembled from reusable and interoperable [CM scripts for DevOps and MLOps](../list_of_scripts.md) +being developed by the [open MLCommons taskforce on automation and reproducibility](../mlperf-education-workgroup.md). + +It is a higher-level wrapper to several other CM scripts modularizing the MLPerf inference benchmark: +* [Reference Python implementation](../app-mlperf-inference-reference) +* [Universal C++ implementation](../app-mlperf-inference-cpp) +* [TFLite C++ implementation](../app-mlperf-inference-tflite-cpp) +* [NVidia optimized implementation](app-mlperf-inference-nvidia) + +See [this SCC'23 tutorial](https://github.com/mlcommons/ck/blob/master/docs/tutorials/sc22-scc-mlperf.md) +to use this script to run a reference (unoptimized) Python implementation of the MLPerf object detection benchmark +with RetinaNet model, Open Images dataset, ONNX runtime and CPU target. + +See this [CM script](../run-mlperf-inference-app) to automate and validate your MLPerf inference submission. 
+ +Get in touch with the [open taskforce on automation and reproducibility at MLCommons](https://github.com/mlcommons/ck/blob/master/docs/mlperf-education-workgroup.md) +if you need help with your submission or if you would like to participate in further modularization of MLPerf +and collaborative design space exploration and optimization of ML Systems. + + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *app,vision,language,mlcommons,mlperf,inference,generic* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "app vision language mlcommons mlperf inference generic" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=app,vision,language,mlcommons,mlperf,inference,generic` + +`cm run script --tags=app,vision,language,mlcommons,mlperf,inference,generic[,variations] [--input_flags]` + +*or* + +`cmr "app vision language mlcommons mlperf inference generic"` + +`cmr "app vision language mlcommons mlperf inference generic [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started 
Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + + +#### Input Flags + +* --**scenario**=MLPerf inference scenario {Offline,Server,SingleStream,MultiStream} (*Offline*) +* --**mode**=MLPerf inference mode {performance,accuracy} (*accuracy*) +* --**test_query_count**=Specifies the number of samples to be processed during a test run +* --**target_qps**=Target QPS +* --**target_latency**=Target Latency +* --**max_batchsize**=Maximum batchsize to be used +* --**num_threads**=Number of CPU threads to launch the application with +* --**hw_name**=Valid value - any system description which has a config file (under same name) defined [here](https://github.com/mlcommons/cm4mlops/tree/main/script/get-configs-sut-mlperf-inference/configs) +* --**output_dir**=Location where the outputs are produced +* --**rerun**=Redo the run even if previous run files exist (*True*) +* --**regenerate_files**=Regenerates measurement files including accuracy.txt files even if a previous run exists. This option is redundant if `--rerun` is used +* --**adr.python.name**=Python virtual environment name (optional) (*mlperf*) +* --**adr.python.version_min**=Minimal Python version (*3.8*) +* --**adr.python.version**=Force Python version (must have all system deps) +* --**adr.compiler.tags**=Compiler for loadgen (*gcc*) +* --**adr.inference-src-loadgen.env.CM_GIT_URL**=Git URL for MLPerf inference sources to build LoadGen (to enable non-reference implementations) +* --**adr.inference-src.env.CM_GIT_URL**=Git URL for MLPerf inference sources to run benchmarks (to enable non-reference implementations) +* --**quiet**=Quiet run (select default values for all questions) (*False*) +* --**readme**=Generate README with the reproducibility report +* --**debug**=Debug MLPerf script + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "scenario":...} +``` +#### Run this script from Python + +
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'app,vision,language,mlcommons,mlperf,inference,generic',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="app,vision,language,mlcommons,mlperf,inference,generic"``` + +#### Run this script via Docker (beta) + +`cm docker script "app vision language mlcommons mlperf inference generic[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * Group "**implementation**" +
+ Click here to expand this section. + + * `_cpp` + - Aliases: `_mil,_mlcommons-cpp` + - Environment variables: + - *CM_MLPERF_CPP*: `yes` + - *CM_MLPERF_IMPLEMENTATION*: `mlcommons_cpp` + - *CM_IMAGENET_ACCURACY_DTYPE*: `float32` + - *CM_OPENIMAGES_ACCURACY_DTYPE*: `float32` + - Workflow: + 1. ***Read "prehook_deps" on other CM scripts*** + * app,mlperf,cpp,inference + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_SKIP_RUN': [True]}` + * CM names: `--adr.['cpp-mlperf-inference', 'mlperf-inference-implementation']...` + - CM script: [app-mlperf-inference-mlcommons-cpp](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-mlcommons-cpp) + * `_intel-original` + - Aliases: `_intel` + - Environment variables: + - *CM_MLPERF_IMPLEMENTATION*: `intel` + - Workflow: + 1. ***Read "prehook_deps" on other CM scripts*** + * reproduce,mlperf,inference,intel + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_SKIP_RUN': [True]}` + * CM names: `--adr.['intel', 'intel-harness', 'mlperf-inference-implementation']...` + - CM script: [app-mlperf-inference-intel](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-intel) + * `_kilt` + - Aliases: `_qualcomm` + - Environment variables: + - *CM_MLPERF_IMPLEMENTATION*: `qualcomm` + - Workflow: + 1. ***Read "prehook_deps" on other CM scripts*** + * reproduce,mlperf,inference,kilt + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_SKIP_RUN': [True]}` + * CM names: `--adr.['kilt', 'kilt-harness', 'mlperf-inference-implementation']...` + - CM script: [app-mlperf-inference-qualcomm](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-qualcomm) + * `_nvidia-original` + - Aliases: `_nvidia` + - Environment variables: + - *CM_MLPERF_IMPLEMENTATION*: `nvidia` + - *CM_SQUAD_ACCURACY_DTYPE*: `float16` + - *CM_IMAGENET_ACCURACY_DTYPE*: `int32` + - *CM_CNNDM_ACCURACY_DTYPE*: `int32` + - *CM_LIBRISPEECH_ACCURACY_DTYPE*: `int8` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,cuda-devices + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_CUDA_DEVICE_PROP_GLOBAL_MEMORY': ['yes', 'on']}` + - CM script: [get-cuda-devices](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda-devices) + 1. ***Read "prehook_deps" on other CM scripts*** + * reproduce,mlperf,nvidia,inference,_run_harness + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_SKIP_RUN': [True]}` + * CM names: `--adr.['nvidia-original-mlperf-inference', 'nvidia-harness', 'mlperf-inference-implementation']...` + - CM script: [app-mlperf-inference-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-nvidia) + * **`_reference`** (default) + - Aliases: `_mlcommons-python,_python` + - Environment variables: + - *CM_MLPERF_PYTHON*: `yes` + - *CM_MLPERF_IMPLEMENTATION*: `mlcommons_python` + - *CM_SQUAD_ACCURACY_DTYPE*: `float32` + - *CM_IMAGENET_ACCURACY_DTYPE*: `float32` + - *CM_OPENIMAGES_ACCURACY_DTYPE*: `float32` + - *CM_LIBRISPEECH_ACCURACY_DTYPE*: `float32` + - Workflow: + 1. ***Read "prehook_deps" on other CM scripts*** + * app,mlperf,reference,inference + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_SKIP_RUN': [True]}` + * CM names: `--adr.['python-reference-mlperf-inference', 'mlperf-inference-implementation']...` + - CM script: [app-mlperf-inference-mlcommons-python](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-mlcommons-python) + * `_tflite-cpp` + - Aliases: `_ctuning-cpp-tflite` + - Environment variables: + - *CM_MLPERF_TFLITE_CPP*: `yes` + - *CM_MLPERF_CPP*: `yes` + - *CM_MLPERF_IMPLEMENTATION*: `ctuning_cpp_tflite` + - *CM_IMAGENET_ACCURACY_DTYPE*: `float32` + - Workflow: + 1. ***Read "prehook_deps" on other CM scripts*** + * app,mlperf,tflite-cpp,inference + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_SKIP_RUN': [True]}` + * CM names: `--adr.['tflite-cpp-mlperf-inference', 'mlperf-inference-implementation']...` + - CM script: [app-mlperf-inference-ctuning-cpp-tflite](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-ctuning-cpp-tflite) + +
+ + + * Group "**backend**" +
+ Click here to expand this section. + + * `_deepsparse` + - Environment variables: + - *CM_MLPERF_BACKEND*: `deepsparse` + - Workflow: + * `_glow` + - Environment variables: + - *CM_MLPERF_BACKEND*: `glow` + - Workflow: + * `_ncnn` + - Environment variables: + - *CM_MLPERF_BACKEND*: `ncnn` + - Workflow: + * `_onnxruntime` + - Environment variables: + - *CM_MLPERF_BACKEND*: `onnxruntime` + - Workflow: + * `_pytorch` + - Environment variables: + - *CM_MLPERF_BACKEND*: `pytorch` + - Workflow: + * `_ray` + - Environment variables: + - *CM_MLPERF_BACKEND*: `ray` + - Workflow: + * `_tensorrt` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tensorrt` + - Workflow: + * `_tf` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tf` + - Workflow: + * `_tflite` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tflite` + - Workflow: + * `_tvm-onnx` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tvm-onnx` + - Workflow: + * `_tvm-pytorch` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tvm-pytorch` + - Workflow: + * `_tvm-tflite` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tvm-tflite` + - Workflow: + +
+ + + * Group "**device**" +
+ Click here to expand this section. + + * **`_cpu`** (default) + - Environment variables: + - *CM_MLPERF_DEVICE*: `cpu` + - Workflow: + * `_cuda` + - Environment variables: + - *CM_MLPERF_DEVICE*: `gpu` + - Workflow: + * `_qaic` + - Environment variables: + - *CM_MLPERF_DEVICE*: `qaic` + - Workflow: + * `_rocm` + - Environment variables: + - *CM_MLPERF_DEVICE*: `rocm` + - Workflow: + * `_tpu` + - Environment variables: + - *CM_MLPERF_DEVICE*: `tpu` + - Workflow: + +
+ + + * Group "**model**" +
+ Click here to expand this section. + + * `_3d-unet-99` + - Environment variables: + - *CM_MODEL*: `3d-unet-99` + - Workflow: + * `_3d-unet-99.9` + - Environment variables: + - *CM_MODEL*: `3d-unet-99.9` + - Workflow: + * `_bert-99` + - Environment variables: + - *CM_MODEL*: `bert-99` + - Workflow: + * `_bert-99.9` + - Environment variables: + - *CM_MODEL*: `bert-99.9` + - Workflow: + * `_dlrm-v2-99` + - Environment variables: + - *CM_MODEL*: `dlrm-v2-99` + - Workflow: + * `_dlrm-v2-99.9` + - Environment variables: + - *CM_MODEL*: `dlrm-v2-99.9` + - Workflow: + * `_efficientnet` + - Environment variables: + - *CM_MODEL*: `efficientnet` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,dataset-aux,imagenet-aux + - CM script: [get-dataset-imagenet-aux](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-aux) + 1. ***Read "posthook_deps" on other CM scripts*** + * run,accuracy,mlperf,_imagenet + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_LOADGEN_MODE': ['accuracy', 'all'], 'CM_MLPERF_ACCURACY_RESULTS_DIR': ['on']}` + * CM names: `--adr.['mlperf-accuracy-script', 'imagenet-accuracy-script']...` + - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) + * `_gptj-99` + - Environment variables: + - *CM_MODEL*: `gptj-99` + - Workflow: + * `_gptj-99.9` + - Environment variables: + - *CM_MODEL*: `gptj-99.9` + - Workflow: + * `_llama2-70b-99` + - Environment variables: + - *CM_MODEL*: `llama2-70b-99` + - Workflow: + * `_llama2-70b-99.9` + - Environment variables: + - *CM_MODEL*: `llama2-70b-99.9` + - Workflow: + * `_mobilenet` + - Environment variables: + - *CM_MODEL*: `mobilenet` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,dataset-aux,imagenet-aux + - CM script: [get-dataset-imagenet-aux](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-aux) + 1. ***Read "posthook_deps" on other CM scripts*** + * run,accuracy,mlperf,_imagenet + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_LOADGEN_MODE': ['accuracy', 'all'], 'CM_MLPERF_ACCURACY_RESULTS_DIR': ['on']}` + * CM names: `--adr.['mlperf-accuracy-script', 'imagenet-accuracy-script']...` + - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) + * **`_resnet50`** (default) + - Environment variables: + - *CM_MODEL*: `resnet50` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,dataset-aux,imagenet-aux + - CM script: [get-dataset-imagenet-aux](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-aux) + 1. ***Read "posthook_deps" on other CM scripts*** + * run,accuracy,mlperf,_imagenet + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_LOADGEN_MODE': ['accuracy', 'all'], 'CM_MLPERF_ACCURACY_RESULTS_DIR': ['on']}` + * CM names: `--adr.['mlperf-accuracy-script', 'imagenet-accuracy-script']...` + - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) + * `_retinanet` + - Environment variables: + - *CM_MODEL*: `retinanet` + - Workflow: + 1. ***Read "posthook_deps" on other CM scripts*** + * run,accuracy,mlperf,_openimages + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_LOADGEN_MODE': ['accuracy', 'all'], 'CM_MLPERF_ACCURACY_RESULTS_DIR': ['on']}` + * CM names: `--adr.['mlperf-accuracy-script', 'openimages-accuracy-script']...` + - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) + * `_rnnt` + - Environment variables: + - *CM_MODEL*: `rnnt` + - Workflow: + 1. ***Read "posthook_deps" on other CM scripts*** + * run,accuracy,mlperf,_librispeech + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_LOADGEN_MODE': ['accuracy', 'all'], 'CM_MLPERF_ACCURACY_RESULTS_DIR': ['on']}`
+          * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_MLPERF_IMPLEMENTATION': ['nvidia']}` + * CM names: `--adr.['mlperf-accuracy-script', 'librispeech-accuracy-script']...` + - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) + * `_sdxl` + - Environment variables: + - *CM_MODEL*: `stable-diffusion-xl` + - Workflow: + 1. ***Read "posthook_deps" on other CM scripts*** + * run,accuracy,mlperf,_coco2014 + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_LOADGEN_MODE': ['accuracy', 'all'], 'CM_MLPERF_ACCURACY_RESULTS_DIR': ['on']}`
+          * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_MLPERF_IMPLEMENTATION': ['nvidia']}` + * CM names: `--adr.['mlperf-accuracy-script', 'coco2014-accuracy-script']...` + - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * `_bfloat16` + - Environment variables: + - *CM_MLPERF_QUANTIZATION*: `False` + - *CM_MLPERF_MODEL_PRECISION*: `float32` + - Workflow: + * `_float16` + - Environment variables: + - *CM_MLPERF_QUANTIZATION*: `False` + - *CM_MLPERF_MODEL_PRECISION*: `float32` + - Workflow: + * **`_float32`** (default) + - Aliases: `_fp32` + - Environment variables: + - *CM_MLPERF_QUANTIZATION*: `False` + - *CM_MLPERF_MODEL_PRECISION*: `float32` + - Workflow: + * `_int4` + - Environment variables: + - *CM_MLPERF_QUANTIZATION*: `True` + - *CM_MLPERF_MODEL_PRECISION*: `int4` + - Workflow: + * `_int8` + - Aliases: `_quantized` + - Environment variables: + - *CM_MLPERF_QUANTIZATION*: `True` + - *CM_MLPERF_MODEL_PRECISION*: `int8` + - Workflow: + * `_uint8` + - Environment variables: + - *CM_MLPERF_QUANTIZATION*: `True` + - *CM_MLPERF_MODEL_PRECISION*: `uint8` + - Workflow: + +
+ + + * Group "**execution-mode**" +
+ Click here to expand this section. + + * `_fast` + - Environment variables: + - *CM_FAST_FACTOR*: `5` + - *CM_OUTPUT_FOLDER_NAME*: `fast_results` + - *CM_MLPERF_RUN_STYLE*: `fast` + - Workflow: + * **`_test`** (default) + - Environment variables: + - *CM_OUTPUT_FOLDER_NAME*: `test_results` + - *CM_MLPERF_RUN_STYLE*: `test` + - Workflow: + * `_valid` + - Environment variables: + - *CM_OUTPUT_FOLDER_NAME*: `valid_results` + - *CM_MLPERF_RUN_STYLE*: `valid` + - Workflow: + +
+ + + * Group "**reproducibility**" +
+ Click here to expand this section. + + * `_r2.1_default` + - Environment variables: + - *CM_SKIP_SYS_UTILS*: `yes` + - *CM_TEST_QUERY_COUNT*: `100` + - Workflow: + * `_r3.0_default` + - Environment variables: + - *CM_SKIP_SYS_UTILS*: `yes` + - Workflow: + * `_r3.1_default` + - Workflow: + * `_r4.0_default` + - Workflow: + +
+ + + * *Internal group (variations should not be selected manually)* +
+ Click here to expand this section. + + * `_3d-unet_` + - Environment variables: + - *CM_MLPERF_MODEL_EQUAL_ISSUE_MODE*: `yes` + - Workflow: + 1. ***Read "posthook_deps" on other CM scripts*** + * run,accuracy,mlperf,_kits19,_int8 + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_LOADGEN_MODE': ['accuracy', 'all'], 'CM_MLPERF_ACCURACY_RESULTS_DIR': ['on']}`
+          * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_MLPERF_IMPLEMENTATION': ['nvidia']}` + * CM names: `--adr.['mlperf-accuracy-script', '3d-unet-accuracy-script']...` + - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) + * `_bert_` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,dataset,squad,language-processing + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_DATASET_SQUAD_VAL_PATH': 'on'}` + - CM script: [get-dataset-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad) + * get,dataset-aux,squad-vocab + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_ML_MODEL_BERT_VOCAB_FILE_WITH_PATH': 'on'}` + - CM script: [get-dataset-squad-vocab](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad-vocab) + 1. ***Read "posthook_deps" on other CM scripts*** + * run,accuracy,mlperf,_squad + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_LOADGEN_MODE': ['accuracy', 'all'], 'CM_MLPERF_ACCURACY_RESULTS_DIR': ['on']}` + * CM names: `--adr.['squad-accuracy-script', 'mlperf-accuracy-script']...` + - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) + * `_dlrm_` + - Workflow: + 1. ***Read "posthook_deps" on other CM scripts*** + * run,accuracy,mlperf,_terabyte,_float32 + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_LOADGEN_MODE': ['accuracy', 'all'], 'CM_MLPERF_ACCURACY_RESULTS_DIR': ['on']}` + * CM names: `--adr.['terabyte-accuracy-script', 'mlperf-accuracy-script']...` + - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) + * `_gptj_` + - Aliases: `_gptj` + - Environment variables: + - *CM_MLPERF_MODEL_EQUAL_ISSUE_MODE*: `yes` + - Workflow: + 1. ***Read "posthook_deps" on other CM scripts*** + * run,accuracy,mlperf,_cnndm + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_LOADGEN_MODE': ['accuracy', 'all'], 'CM_MLPERF_ACCURACY_RESULTS_DIR': ['on']}`
+          * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_MLPERF_IMPLEMENTATION': ['intel']}` + * CM names: `--adr.['cnndm-accuracy-script', 'mlperf-accuracy-script']...` + - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) + * `_intel-original,gptj_` + - Workflow: + * `_llama2-70b_` + - Environment variables: + - *CM_MLPERF_MODEL_EQUAL_ISSUE_MODE*: `yes` + - Workflow: + 1. ***Read "posthook_deps" on other CM scripts*** + * run,accuracy,mlperf,_open-orca,_int32 + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_LOADGEN_MODE': ['accuracy', 'all'], 'CM_MLPERF_ACCURACY_RESULTS_DIR': ['on']}`
+          * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_MLPERF_IMPLEMENTATION': ['nvidia']}` + * CM names: `--adr.['mlperf-accuracy-script', 'open-orca-accuracy-script']...` + - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) + * `_reference,bert_` + - Workflow: + * `_reference,dlrm-v2_` + - Workflow: + * `_reference,gptj_` + - Workflow: + * `_reference,llama2-70b_` + - Workflow: + * `_reference,sdxl_` + - Workflow: + +
+ + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_intel-original,bert-99` + - Workflow: + * `_intel-original,bert-99.9` + - Workflow: + * `_intel-original,gptj-99` + - Workflow: + * `_intel-original,gptj-99.9` + - Workflow: + * `_intel-original,gptj_,build-harness` + - Workflow: + * `_intel-original,resnet50` + - Workflow: + * `_intel-original,retinanet` + - Workflow: + * `_kilt,qaic,bert-99` + - Workflow: + * `_kilt,qaic,bert-99.9` + - Workflow: + * `_kilt,qaic,resnet50` + - Workflow: + * `_kilt,qaic,retinanet` + - Workflow: + * `_power` + - Environment variables: + - *CM_MLPERF_POWER*: `yes` + - *CM_SYSTEM_POWER*: `yes` + - Workflow: + * `_reference,resnet50` + - Workflow: + * `_reference,retinanet` + - Workflow: + * `_rnnt,reference` + - Environment variables: + - *CM_MLPERF_PRINT_SUMMARY*: `no` + - Workflow: + * `_valid,retinanet` + - Workflow: + +
+ + + * Group "**batch_size**" +
+ Click here to expand this section. + + * `_batch_size.#` + - Environment variables: + - *CM_MLPERF_LOADGEN_MAX_BATCHSIZE*: `#` + - Workflow: + +
+ + + * Group "**loadgen-scenario**" +
+ Click here to expand this section. + + * `_multistream` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `MultiStream` + - Workflow: + * **`_offline`** (default) + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `Offline` + - Workflow: + * `_server` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `Server` + - Workflow: + * `_singlestream` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `SingleStream` + - Workflow: + +
+ + +#### Unsupported or invalid variation combinations + + + +* `_resnet50,_pytorch` +* `_retinanet,_tf` +* `_nvidia-original,_tf` +* `_nvidia-original,_onnxruntime` +* `_nvidia-original,_pytorch` +* `_nvidia,_tf` +* `_nvidia,_onnxruntime` +* `_nvidia,_pytorch` +* `_gptj,_tf` + +#### Default variations + +`_cpu,_float32,_offline,_reference,_resnet50,_test` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--clean=value` → `CM_MLPERF_CLEAN_SUBMISSION_DIR=value` +* `--count=value` → `CM_MLPERF_LOADGEN_QUERY_COUNT=value` +* `--debug=value` → `CM_DEBUG_SCRIPT_BENCHMARK_PROGRAM=value` +* `--docker=value` → `CM_RUN_DOCKER_CONTAINER=value` +* `--gpu_name=value` → `CM_NVIDIA_GPU_NAME=value` +* `--hw_name=value` → `CM_HW_NAME=value` +* `--imagenet_path=value` → `IMAGENET_PATH=value` +* `--max_amps=value` → `CM_MLPERF_POWER_MAX_AMPS=value` +* `--max_batchsize=value` → `CM_MLPERF_LOADGEN_MAX_BATCHSIZE=value` +* `--max_volts=value` → `CM_MLPERF_POWER_MAX_VOLTS=value` +* `--mode=value` → `CM_MLPERF_LOADGEN_MODE=value` +* `--multistream_target_latency=value` → `CM_MLPERF_LOADGEN_MULTISTREAM_TARGET_LATENCY=value` +* `--ntp_server=value` → `CM_MLPERF_POWER_NTP_SERVER=value` +* `--num_threads=value` → `CM_NUM_THREADS=value` +* `--offline_target_qps=value` → `CM_MLPERF_LOADGEN_OFFLINE_TARGET_QPS=value` +* `--output_dir=value` → `OUTPUT_BASE_DIR=value` +* `--power=value` → `CM_MLPERF_POWER=value` +* `--power_server=value` → `CM_MLPERF_POWER_SERVER_ADDRESS=value` +* `--readme=value` → `CM_MLPERF_README=value` +* `--regenerate_files=value` → `CM_REGENERATE_MEASURE_FILES=value` +* `--rerun=value` → `CM_RERUN=value` +* `--scenario=value` → `CM_MLPERF_LOADGEN_SCENARIO=value` +* `--server_target_qps=value` → `CM_MLPERF_LOADGEN_SERVER_TARGET_QPS=value` +* `--singlestream_target_latency=value` → `CM_MLPERF_LOADGEN_SINGLESTREAM_TARGET_LATENCY=value` +* `--target_latency=value` → `CM_MLPERF_LOADGEN_TARGET_LATENCY=value` +* `--target_qps=value` → `CM_MLPERF_LOADGEN_TARGET_QPS=value` +* `--test_query_count=value` → `CM_TEST_QUERY_COUNT=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "clean":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_MLPERF_LOADGEN_MODE: `accuracy` +* CM_MLPERF_LOADGEN_SCENARIO: `Offline` +* CM_OUTPUT_FOLDER_NAME: `test_results` +* CM_MLPERF_RUN_STYLE: `test` +* CM_TEST_QUERY_COUNT: `10` +* CM_MLPERF_QUANTIZATION: `False` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,mlcommons,inference,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,mlperf,inference,utils + - CM script: [get-mlperf-inference-utils](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-utils) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference/_cm.yaml) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference/run.sh) + 1. ***Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference/_cm.yaml)*** + * get,mlperf,sut,description + - CM script: [get-mlperf-inference-sut-description](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-sut-description) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference/_cm.yaml) + +___ +### Script output +`cmr "app vision language mlcommons mlperf inference generic [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_MLPERF_*` +#### New environment keys auto-detected from customize + +* `CM_MLPERF_LOADGEN_COMPLIANCE_TEST` \ No newline at end of file diff --git a/docs/Modular-MLPerf-inference-benchmark-pipeline/benchmark-program-mlperf.md b/docs/Modular-MLPerf-inference-benchmark-pipeline/benchmark-program-mlperf.md new file mode 100644 index 0000000000..37dfa75d42 --- /dev/null +++ b/docs/Modular-MLPerf-inference-benchmark-pipeline/benchmark-program-mlperf.md @@ -0,0 +1,152 @@ +Automatically generated README for this automation recipe: **benchmark-program-mlperf** + +Category: **Modular MLPerf inference benchmark pipeline** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=benchmark-program-mlperf,cfff0132a8aa4018) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program-mlperf)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *mlperf,benchmark-mlperf* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "mlperf benchmark-mlperf" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=mlperf,benchmark-mlperf` + +`cm run script --tags=mlperf,benchmark-mlperf[,variations] ` + +*or* + +`cmr "mlperf benchmark-mlperf"` + +`cmr "mlperf benchmark-mlperf [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'mlperf,benchmark-mlperf',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="mlperf,benchmark-mlperf"``` + +#### Run this script via Docker (beta) + +`cm docker script "mlperf benchmark-mlperf[variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**power-mode**" +
+ Click here to expand this section. + + * **`_no-power`** (default) + - Workflow: + 1. ***Read "post_deps" on other CM scripts*** + * benchmark-program,program + * CM names: `--adr.['benchmark-program']...` + - CM script: [benchmark-program](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program) + * `_power` + - Environment variables: + - *CM_MLPERF_POWER*: `yes` + - Workflow: + 1. ***Read "prehook_deps" on other CM scripts*** + * benchmark-program,program + * CM names: `--adr.['benchmark-program']...` + - CM script: [benchmark-program](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program) + 1. ***Read "post_deps" on other CM scripts*** + * run,mlperf,power,client + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_LOADGEN_MODE': ['performance']}` + * CM names: `--adr.['mlperf-power-client']...` + - CM script: [run-mlperf-power-client](https://github.com/mlcommons/cm4mlops/tree/master/script/run-mlperf-power-client) + +
+ + +#### Default variations + +`_no-power` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program-mlperf/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program-mlperf/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program-mlperf/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program-mlperf/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program-mlperf/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program-mlperf/_cm.json) + +___ +### Script output +`cmr "mlperf benchmark-mlperf [,variations]" -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Modular-MLPerf-inference-benchmark-pipeline/run-mlperf-inference-app.md b/docs/Modular-MLPerf-inference-benchmark-pipeline/run-mlperf-inference-app.md new file mode 100644 index 0000000000..641d37d54e --- /dev/null +++ b/docs/Modular-MLPerf-inference-benchmark-pipeline/run-mlperf-inference-app.md @@ -0,0 +1,405 @@ +Automatically generated README for this automation recipe: **run-mlperf-inference-app** + +Category: **Modular MLPerf inference benchmark pipeline** + +License: **Apache 2.0** + +Developers: [Arjun Suresh](https://www.linkedin.com/in/arjunsuresh), [Grigori Fursin](https://cKnowledge.org/gfursin) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=run-mlperf-inference-app,4a5d5b13fd7e4ac8) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + 
+--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-app)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *run-mlperf,inference* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "run-mlperf,inference" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=run-mlperf,inference` + +`cm run script --tags=run-mlperf,inference[,variations] [--input_flags]` + +*or* + +`cmr "run-mlperf,inference"` + +`cmr "run-mlperf,inference [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + + +#### Input Flags + +* --**division**=MLPerf division {open,closed} (*open*) +* --**category**=MLPerf category {edge,datacenter,network} (*edge*) +* --**device**=MLPerf device {cpu,cuda,rocm,qaic} (*cpu*) +* --**model**=MLPerf model {resnet50,retinanet,bert-99,bert-99.9,3d-unet-99,3d-unet-99.9,rnnt,dlrm-v2-99,dlrm-v2-99.9,gptj-99,gptj-99.9,sdxl,llama2-70b-99,llama2-70b-99.9,mobilenet,efficientnet} (*resnet50*) +* --**precision**=MLPerf model precision {float32,float16,bfloat16,int8,uint8} 
+* --**implementation**=MLPerf implementation {mlcommons-python,mlcommons-cpp,nvidia,intel,qualcomm,ctuning-cpp-tflite} (*mlcommons-python*) +* --**backend**=MLPerf framework (backend) {onnxruntime,tf,pytorch,deepsparse,tensorrt,glow,tvm-onnx} (*onnxruntime*) +* --**scenario**=MLPerf scenario {Offline,Server,SingleStream,MultiStream} (*Offline*) +* --**mode**=MLPerf benchmark mode {,accuracy,performance} +* --**execution_mode**=MLPerf execution mode {test,fast,valid} (*test*) +* --**sut**=SUT configuration (if known) +* --**submitter**=Submitter name (without space) (*CTuning*) +* --**results_dir**=Folder path to store results (defaults to the current working directory) +* --**submission_dir**=Folder path to store MLPerf submission tree +* --**adr.compiler.tags**=Compiler for loadgen and any C/C++ part of implementation +* --**adr.inference-src-loadgen.env.CM_GIT_URL**=Git URL for MLPerf inference sources to build LoadGen (to enable non-reference implementations) +* --**adr.inference-src.env.CM_GIT_URL**=Git URL for MLPerf inference sources to run benchmarks (to enable non-reference implementations) +* --**adr.mlperf-inference-implementation.max_batchsize**=Maximum batchsize to be used +* --**adr.mlperf-inference-implementation.num_threads**=Number of threads (reference & C++ implementation only) +* --**adr.python.name**=Python virtual environment name (optional) +* --**adr.python.version**=Force Python version (must have all system deps) +* --**adr.python.version_min**=Minimal Python version (*3.8*) +* --**power**=Measure power {yes,no} (*no*) +* --**adr.mlperf-power-client.power_server**=MLPerf Power server IP address (*192.168.0.15*) +* --**adr.mlperf-power-client.port**=MLPerf Power server port (*4950*) +* --**clean**=Clean run (*False*) +* --**compliance**=Whether to run compliance tests (applicable only for closed division) {yes,no} (*no*) +* --**dashboard_wb_project**=W&B dashboard project (*cm-mlperf-dse-testing*) +* --**dashboard_wb_user**=W&B dashboard 
user (*cmind*) +* --**hw_name**=MLPerf hardware name (for example "gcp.c3_standard_8", "nvidia_orin", "lenovo_p14s_gen_4_windows_11", "macbook_pro_m1_2", "thundercomm_rb6" ...) +* --**multistream_target_latency**=Set MultiStream target latency +* --**offline_target_qps**=Set LoadGen Offline target QPS +* --**quiet**=Quiet run (select default values for all questions) (*True*) +* --**server_target_qps**=Set Server target QPS +* --**singlestream_target_latency**=Set SingleStream target latency +* --**target_latency**=Set Target latency +* --**target_qps**=Set LoadGen target QPS +* --**j**=Print results dictionary to console at the end of the run (*False*) +* --**repro**=Record input/output/state/info files to make it easier to reproduce results (*False*) +* --**time**=Print script execution time at the end of the run (*True*) +* --**debug**=Debug this script (*False*) + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "division":...} +``` +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'run-mlperf,inference' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="run-mlperf,inference"``` + +#### Run this script via Docker (beta) + +`cm docker script "run-mlperf,inference[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_all-scenarios` + - Environment variables: + - *CM_MLPERF_LOADGEN_ALL_SCENARIOS*: `yes` + - Workflow: + * `_compliance` + - Environment variables: + - *CM_MLPERF_LOADGEN_COMPLIANCE*: `yes` + - Workflow: + * `_dashboard` + - Environment variables: + - *CM_MLPERF_DASHBOARD*: `on` + - Workflow: + +
+ + + * Group "**benchmark-version**" +
+ Click here to expand this section. + + * `_r2.1` + - Environment variables: + - *CM_MLPERF_INFERENCE_VERSION*: `2.1` + - *CM_RUN_MLPERF_INFERENCE_APP_DEFAULTS*: `r2.1_default` + - Workflow: + * `_r3.0` + - Environment variables: + - *CM_MLPERF_INFERENCE_VERSION*: `3.0` + - *CM_RUN_MLPERF_INFERENCE_APP_DEFAULTS*: `r3.0_default` + - Workflow: + * `_r3.1` + - Environment variables: + - *CM_MLPERF_INFERENCE_VERSION*: `3.1` + - *CM_RUN_MLPERF_INFERENCE_APP_DEFAULTS*: `r3.1_default` + - Workflow: + * `_r4.0` + - Environment variables: + - *CM_MLPERF_INFERENCE_VERSION*: `4.0` + - *CM_RUN_MLPERF_INFERENCE_APP_DEFAULTS*: `r4.0_default` + - Workflow: + +
+ + + * Group "**mode**" +
+ Click here to expand this section. + + * `_all-modes` + - Environment variables: + - *CM_MLPERF_LOADGEN_ALL_MODES*: `yes` + - Workflow: + +
+ + + * Group "**submission-generation**" +
+ Click here to expand this section. + + * `_accuracy-only` + - Environment variables: + - *CM_MLPERF_LOADGEN_MODE*: `accuracy` + - *CM_MLPERF_SUBMISSION_RUN*: `yes` + - *CM_RUN_MLPERF_ACCURACY*: `on` + - *CM_RUN_SUBMISSION_CHECKER*: `no` + - Workflow: + * **`_find-performance`** (default) + - Environment variables: + - *CM_MLPERF_FIND_PERFORMANCE_MODE*: `yes` + - *CM_MLPERF_LOADGEN_ALL_MODES*: `no` + - *CM_MLPERF_LOADGEN_MODE*: `performance` + - *CM_MLPERF_RESULT_PUSH_TO_GITHUB*: `False` + - Workflow: + * `_performance-only` + - Environment variables: + - *CM_MLPERF_LOADGEN_MODE*: `performance` + - *CM_MLPERF_SUBMISSION_RUN*: `yes` + - *CM_RUN_SUBMISSION_CHECKER*: `no` + - Workflow: + * `_populate-readme` + - Environment variables: + - *CM_MLPERF_README*: `yes` + - *CM_MLPERF_SUBMISSION_RUN*: `yes` + - *CM_RUN_SUBMISSION_CHECKER*: `no` + - Workflow: + * `_submission` + - Environment variables: + - *CM_MLPERF_LOADGEN_COMPLIANCE*: `yes` + - *CM_MLPERF_SUBMISSION_RUN*: `yes` + - *CM_RUN_MLPERF_ACCURACY*: `on` + - *CM_RUN_SUBMISSION_CHECKER*: `yes` + - *CM_TAR_SUBMISSION_DIR*: `yes` + - Workflow: + 1. ***Read "post_deps" on other CM scripts*** + * generate,mlperf,inference,submission + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_SKIP_SUBMISSION_GENERATION': ['no', 'false', 'False', '0']}` + * CM names: `--adr.['submission-generator']...` + - CM script: [generate-mlperf-inference-submission](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-inference-submission) + +
+ + + * Group "**submission-generation-style**" +
+ Click here to expand this section. + + * `_full` + - Environment variables: + - *CM_MLPERF_SUBMISSION_GENERATION_STYLE*: `full` + - *CM_MLPERF_SKIP_SUBMISSION_GENERATION*: `yes` + - Workflow: + * **`_short`** (default) + - Environment variables: + - *CM_MLPERF_SUBMISSION_GENERATION_STYLE*: `short` + - Workflow: + +
+ + +#### Default variations + +`_find-performance,_short` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--backend=value` → `CM_MLPERF_BACKEND=value` +* `--batch_size=value` → `CM_MLPERF_LOADGEN_MAX_BATCHSIZE=value` +* `--category=value` → `CM_MLPERF_SUBMISSION_SYSTEM_TYPE=value` +* `--clean=value` → `CM_MLPERF_CLEAN_ALL=value` +* `--compliance=value` → `CM_MLPERF_LOADGEN_COMPLIANCE=value` +* `--dashboard_wb_project=value` → `CM_MLPERF_DASHBOARD_WANDB_PROJECT=value` +* `--dashboard_wb_user=value` → `CM_MLPERF_DASHBOARD_WANDB_USER=value` +* `--debug=value` → `CM_DEBUG_SCRIPT_BENCHMARK_PROGRAM=value` +* `--device=value` → `CM_MLPERF_DEVICE=value` +* `--division=value` → `CM_MLPERF_SUBMISSION_DIVISION=value` +* `--docker=value` → `CM_MLPERF_USE_DOCKER=value` +* `--dump_version_info=value` → `CM_DUMP_VERSION_INFO=value` +* `--execution_mode=value` → `CM_MLPERF_RUN_STYLE=value` +* `--find_performance=value` → `CM_MLPERF_FIND_PERFORMANCE_MODE=value` +* `--gpu_name=value` → `CM_NVIDIA_GPU_NAME=value` +* `--hw_name=value` → `CM_HW_NAME=value` +* `--hw_notes_extra=value` → `CM_MLPERF_SUT_SW_NOTES_EXTRA=value` +* `--imagenet_path=value` → `IMAGENET_PATH=value` +* `--implementation=value` → `CM_MLPERF_IMPLEMENTATION=value` +* `--lang=value` → `CM_MLPERF_IMPLEMENTATION=value` +* `--mode=value` → `CM_MLPERF_LOADGEN_MODE=value` +* `--model=value` → `CM_MLPERF_MODEL=value` +* `--multistream_target_latency=value` → `CM_MLPERF_LOADGEN_MULTISTREAM_TARGET_LATENCY=value` +* `--network=value` → `CM_NETWORK_LOADGEN=value` +* `--offline_target_qps=value` → `CM_MLPERF_LOADGEN_OFFLINE_TARGET_QPS=value` +* `--output_dir=value` → `OUTPUT_BASE_DIR=value` +* `--output_summary=value` → `MLPERF_INFERENCE_SUBMISSION_SUMMARY=value` +* `--output_tar=value` → `MLPERF_INFERENCE_SUBMISSION_TAR_FILE=value` +* `--performance_sample_count=value` → `CM_MLPERF_LOADGEN_PERFORMANCE_SAMPLE_COUNT=value` +* `--power=value` → `CM_SYSTEM_POWER=value` +* `--precision=value` → `CM_MLPERF_MODEL_PRECISION=value` +* `--preprocess_submission=value` → 
`CM_RUN_MLPERF_SUBMISSION_PREPROCESSOR=value` +* `--push_to_github=value` → `CM_MLPERF_RESULT_PUSH_TO_GITHUB=value` +* `--readme=value` → `CM_MLPERF_README=value` +* `--regenerate_accuracy_file=value` → `CM_MLPERF_REGENERATE_ACCURACY_FILE=value` +* `--regenerate_files=value` → `CM_REGENERATE_MEASURE_FILES=value` +* `--rerun=value` → `CM_RERUN=value` +* `--results_dir=value` → `OUTPUT_BASE_DIR=value` +* `--results_git_url=value` → `CM_MLPERF_RESULTS_GIT_REPO_URL=value` +* `--run_checker=value` → `CM_RUN_SUBMISSION_CHECKER=value` +* `--run_style=value` → `CM_MLPERF_RUN_STYLE=value` +* `--save_console_log=value` → `CM_SAVE_CONSOLE_LOG=value` +* `--scenario=value` → `CM_MLPERF_LOADGEN_SCENARIO=value` +* `--server_target_qps=value` → `CM_MLPERF_LOADGEN_SERVER_TARGET_QPS=value` +* `--singlestream_target_latency=value` → `CM_MLPERF_LOADGEN_SINGLESTREAM_TARGET_LATENCY=value` +* `--skip_submission_generation=value` → `CM_MLPERF_SKIP_SUBMISSION_GENERATION=value` +* `--skip_truncation=value` → `CM_SKIP_TRUNCATE_ACCURACY=value` +* `--submission_dir=value` → `CM_MLPERF_INFERENCE_SUBMISSION_DIR=value` +* `--submitter=value` → `CM_MLPERF_SUBMITTER=value` +* `--sut=value` → `CM_MLPERF_INFERENCE_SUT_VARIATION=value` +* `--sut_servers=value` → `CM_NETWORK_LOADGEN_SUT_SERVERS=value` +* `--sw_notes_extra=value` → `CM_MLPERF_SUT_SW_NOTES_EXTRA=value` +* `--system_type=value` → `CM_MLPERF_SUBMISSION_SYSTEM_TYPE=value` +* `--target_latency=value` → `CM_MLPERF_LOADGEN_TARGET_LATENCY=value` +* `--target_qps=value` → `CM_MLPERF_LOADGEN_TARGET_QPS=value` +* `--test_query_count=value` → `CM_TEST_QUERY_COUNT=value` +* `--threads=value` → `CM_NUM_THREADS=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "backend":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_MLPERF_IMPLEMENTATION: `reference` +* CM_MLPERF_MODEL: `resnet50` +* CM_MLPERF_RUN_STYLE: `test` + +
+ +#### Versions +* `master` +* `r2.1` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-app/_cm.yaml)*** + * detect,os + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_USE_DOCKER': [True]}` + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_USE_DOCKER': [True]}` + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,python3 + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_USE_DOCKER': [True]}` + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,mlcommons,inference,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,sut,description + - CM script: [get-mlperf-inference-sut-description](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-sut-description) + * get,mlperf,inference,results,dir + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_USE_DOCKER': [False]}` + * Skip this dependency only if all ENV vars are set:
+`{'OUTPUT_BASE_DIR': [True]}` + * CM names: `--adr.['get-mlperf-inference-results-dir']...` + - CM script: [get-mlperf-inference-results-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-results-dir) + * install,pip-package,for-cmind-python,_package.tabulate + - CM script: [install-pip-package-for-cmind-python](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pip-package-for-cmind-python) + * get,mlperf,inference,utils + - CM script: [get-mlperf-inference-utils](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-utils) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-app/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-app/_cm.yaml) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-app/_cm.yaml) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-app/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-app/_cm.yaml) + +___ +### Script output +`cmr "run-mlperf,inference [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Modular-MLPerf-training-benchmark-pipeline/app-mlperf-training-nvidia.md b/docs/Modular-MLPerf-training-benchmark-pipeline/app-mlperf-training-nvidia.md new file mode 100644 index 0000000000..baf219a06e --- /dev/null +++ b/docs/Modular-MLPerf-training-benchmark-pipeline/app-mlperf-training-nvidia.md @@ -0,0 +1,242 @@ +Automatically generated README for this automation recipe: **app-mlperf-training-nvidia** + +Category: **Modular MLPerf training benchmark pipeline** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-mlperf-training-nvidia,1e2e357618cc4674) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-nvidia)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *app,vision,language,mlcommons,mlperf,training,nvidia* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "app vision language mlcommons mlperf training nvidia" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=app,vision,language,mlcommons,mlperf,training,nvidia` + +`cm run script --tags=app,vision,language,mlcommons,mlperf,training,nvidia[,variations] [--input_flags]` + +*or* + +`cmr "app vision language mlcommons mlperf training nvidia"` + +`cmr "app vision language mlcommons mlperf training nvidia [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'app,vision,language,mlcommons,mlperf,training,nvidia' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="app,vision,language,mlcommons,mlperf,training,nvidia"``` + +#### Run this script via Docker (beta) + +`cm docker script "app vision language mlcommons mlperf training nvidia[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_bert` + - Environment variables: + - *CM_MLPERF_MODEL*: `bert` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_protobuf + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['tf', 'tflite']}` + * CM names: `--adr.['protobuf']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torch + * CM names: `--adr.['ml-engine-pytorch']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + +
+ + + * Group "**device**" +
+ Click here to expand this section. + + * **`_cuda`** (default) + - Environment variables: + - *CM_MLPERF_DEVICE*: `cuda` + - *USE_CUDA*: `True` + - Workflow: + * `_tpu` + - Environment variables: + - *CM_MLPERF_DEVICE*: `tpu` + - *CUDA_VISIBLE_DEVICES*: `` + - *USE_CUDA*: `False` + - Workflow: + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * `_pytorch` + - Environment variables: + - *CM_MLPERF_BACKEND*: `pytorch` + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - Workflow: + * `_tf` + - Aliases: `_tensorflow` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tf` + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - Workflow: + +
+ + +#### Default variations + +`_cuda` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--clean=value` → `CM_MLPERF_CLEAN_SUBMISSION_DIR=value` +* `--docker=value` → `CM_RUN_DOCKER_CONTAINER=value` +* `--hw_name=value` → `CM_HW_NAME=value` +* `--model=value` → `CM_MLPERF_CUSTOM_MODEL_PATH=value` +* `--num_threads=value` → `CM_NUM_THREADS=value` +* `--output_dir=value` → `OUTPUT_BASE_DIR=value` +* `--rerun=value` → `CM_RERUN=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "clean":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_MLPERF_SUT_NAME_IMPLEMENTATION_PREFIX: `nvidia` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-nvidia/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,mlperf,training,src + * CM names: `--adr.['training-src', 'mlperf-training-src']...` + - CM script: [get-mlperf-training-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-training-src) + * get,git,repo,_repo.https://github.com/mlcommons/training_results_v2.1 + * CM names: `--adr.['training-results', 'mlperf-training-results']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + * get,cuda + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_DEVICE': ['cuda']}` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * get,generic-python-lib,_torchvision_cuda + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['pytorch'], 'CM_MLPERF_DEVICE': ['cuda']}` + * CM names: `--adr.['ml-engine-torchvision']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_mlperf_logging + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * prepare,mlperf,training,data,bert,_nvidia + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_MODEL': ['bert']}` + * CM names: `--adr.['prepare-data', 'bert-model']...` + - CM script: [prepare-training-data-bert](https://github.com/mlcommons/cm4mlops/tree/master/script/prepare-training-data-bert) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-nvidia/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-nvidia/_cm.yaml) + 1. ***Run native script if exists*** + * [run-bert-training.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-nvidia/run-bert-training.sh) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-nvidia/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-nvidia/_cm.yaml) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-nvidia/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-nvidia/_cm.yaml) + +___ +### Script output +`cmr "app vision language mlcommons mlperf training nvidia [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_DATASET_*` +* `CM_HW_NAME` +* `CM_MLPERF_*` +* `CM_ML_MODEL_*` +#### New environment keys auto-detected from customize diff --git a/docs/Modular-MLPerf-training-benchmark-pipeline/app-mlperf-training-reference.md b/docs/Modular-MLPerf-training-benchmark-pipeline/app-mlperf-training-reference.md new file mode 100644 index 0000000000..91149ed22c --- /dev/null +++ b/docs/Modular-MLPerf-training-benchmark-pipeline/app-mlperf-training-reference.md @@ -0,0 +1,240 @@ +Automatically generated README for this automation recipe: **app-mlperf-training-reference** + +Category: **Modular MLPerf training benchmark pipeline** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-mlperf-training-reference,0c4b11bdcf494b4f) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-reference)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *app,vision,language,mlcommons,mlperf,training,reference,ref* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "app vision language mlcommons mlperf training reference ref" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=app,vision,language,mlcommons,mlperf,training,reference,ref` + +`cm run script --tags=app,vision,language,mlcommons,mlperf,training,reference,ref[,variations] [--input_flags]` + +*or* + +`cmr "app vision language mlcommons mlperf training reference ref"` + +`cmr "app vision language mlcommons mlperf training reference ref [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'app,vision,language,mlcommons,mlperf,training,reference,ref' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="app,vision,language,mlcommons,mlperf,training,reference,ref"``` + +#### Run this script via Docker (beta) + +`cm docker script "app vision language mlcommons mlperf training reference ref[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_bert` + - Environment variables: + - *CM_MLPERF_MODEL*: `bert` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_protobuf + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['tf', 'tflite']}` + * CM names: `--adr.['protobuf']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torch + * CM names: `--adr.['ml-engine-pytorch']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + +
+ + + * Group "**device**" +
+ Click here to expand this section. + + * **`_cuda`** (default) + - Environment variables: + - *CM_MLPERF_DEVICE*: `cuda` + - *USE_CUDA*: `True` + - Workflow: + * `_tpu` + - Environment variables: + - *CM_MLPERF_DEVICE*: `tpu` + - *CUDA_VISIBLE_DEVICES*: `` + - *USE_CUDA*: `False` + - Workflow: + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * `_pytorch` + - Environment variables: + - *CM_MLPERF_BACKEND*: `pytorch` + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - Workflow: + * `_tf` + - Aliases: `_tensorflow` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tf` + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - Workflow: + +
+ + +#### Default variations + +`_cuda` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--clean=value` → `CM_MLPERF_CLEAN_SUBMISSION_DIR=value` +* `--docker=value` → `CM_RUN_DOCKER_CONTAINER=value` +* `--hw_name=value` → `CM_HW_NAME=value` +* `--model=value` → `CM_MLPERF_CUSTOM_MODEL_PATH=value` +* `--num_threads=value` → `CM_NUM_THREADS=value` +* `--output_dir=value` → `OUTPUT_BASE_DIR=value` +* `--rerun=value` → `CM_RERUN=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "clean":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_MLPERF_SUT_NAME_IMPLEMENTATION_PREFIX: `reference` +* CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX: `` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-reference/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,mlperf,training,src + * CM names: `--adr.['training-src']...` + - CM script: [get-mlperf-training-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-training-src) + * get,cuda + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_DEVICE': ['cuda']}` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * get,generic-python-lib,_torchvision_cuda + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['pytorch'], 'CM_MLPERF_DEVICE': ['cuda']}` + * CM names: `--adr.['ml-engine-torchvision']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_mlperf_logging + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * prepare,mlperf,training,data,bert,_reference + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_MODEL': ['bert']}` + * CM names: `--adr.['prepare-data', 'bert-model']...` + - CM script: [prepare-training-data-bert](https://github.com/mlcommons/cm4mlops/tree/master/script/prepare-training-data-bert) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-reference/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-reference/_cm.yaml) + 1. ***Run native script if exists*** + * [run-bert-training.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-reference/run-bert-training.sh) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-reference/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-reference/_cm.yaml) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-reference/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-reference/_cm.yaml) + +___ +### Script output +`cmr "app vision language mlcommons mlperf training reference ref [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_DATASET_*` +* `CM_HW_NAME` +* `CM_MLPERF_*` +* `CM_ML_MODEL_*` +#### New environment keys auto-detected from customize diff --git a/docs/Modular-application-pipeline/app-image-corner-detection.md b/docs/Modular-application-pipeline/app-image-corner-detection.md new file mode 100644 index 0000000000..933030b4f2 --- /dev/null +++ b/docs/Modular-application-pipeline/app-image-corner-detection.md @@ -0,0 +1,129 @@ +Automatically generated README for this automation recipe: **app-image-corner-detection** + +Category: **Modular application pipeline** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-image-corner-detection,998ffee0bc534d0a) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-corner-detection)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *app,image,corner-detection* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "app image corner-detection" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=app,image,corner-detection` + +`cm run script --tags=app,image,corner-detection ` + +*or* + +`cmr "app image corner-detection"` + +`cmr "app image corner-detection " ` + + +#### Run this script from Python + +
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'app,image,corner-detection',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="app,image,corner-detection"``` + +#### Run this script via Docker (beta) + +`cm docker script "app image corner-detection" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+
+___
+### Dependencies on other CM scripts
+
+
+  1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-corner-detection/_cm.json)***
+     * detect,os
+       - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os)
+     * detect,cpu
+       - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu)
+  1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-corner-detection/customize.py)***
+  1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-corner-detection/_cm.json)
+  1. ***Run native script if exists***
+     * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-corner-detection/run.sh)
+  1. ***Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-corner-detection/_cm.json)***
+     * compile,cpp-program
+       * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_SKIP_COMPILE': ['on']}`
+       - CM script: [compile-program](https://github.com/mlcommons/cm4mlops/tree/master/script/compile-program)
+     * benchmark-program
+       * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_SKIP_RUN': ['on']}` + - CM script: [benchmark-program](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-corner-detection/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-corner-detection/_cm.json) + +___ +### Script output +`cmr "app image corner-detection " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Platform-information/detect-cpu.md b/docs/Platform-information/detect-cpu.md new file mode 100644 index 0000000000..353ee6d4b7 --- /dev/null +++ b/docs/Platform-information/detect-cpu.md @@ -0,0 +1,128 @@ +Automatically generated README for this automation recipe: **detect-cpu** + +Category: **Platform information** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=detect-cpu,586c8a43320142f7) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-cpu)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *detect,cpu,detect-cpu,info* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "detect cpu detect-cpu info" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=detect,cpu,detect-cpu,info` + +`cm run script --tags=detect,cpu,detect-cpu,info ` + +*or* + +`cmr "detect cpu detect-cpu info"` + +`cmr "detect cpu detect-cpu info " ` + + +#### Run this script from Python + +
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'detect,cpu,detect-cpu,info',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="detect,cpu,detect-cpu,info"``` + +#### Run this script via Docker (beta) + +`cm docker script "detect cpu detect-cpu info" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-cpu/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-cpu/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-cpu/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-cpu/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-cpu/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-cpu/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-cpu/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-cpu/_cm.json) + +___ +### Script output +`cmr "detect cpu detect-cpu info " -j` +#### New environment keys (filter) + +* `CM_HOST_CPU_*` +* `CM_HOST_DISK_CAPACITY` +* `CM_HOST_MEMORY_CAPACITY` +#### New environment keys auto-detected from customize + +* `CM_HOST_CPU_PHYSICAL_CORES_PER_SOCKET` +* `CM_HOST_CPU_SOCKETS` +* `CM_HOST_CPU_THREADS_PER_CORE` +* `CM_HOST_CPU_TOTAL_LOGICAL_CORES` \ No newline at end of file diff --git a/docs/Platform-information/detect-os.md b/docs/Platform-information/detect-os.md new file mode 100644 index 0000000000..07061659ac --- /dev/null +++ b/docs/Platform-information/detect-os.md @@ -0,0 +1,138 @@ +Automatically generated README for this automation recipe: **detect-os** + +Category: **Platform information** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=detect-os,863735b7db8c44fc) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-os)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *detect-os,detect,os,info* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "detect-os detect os info" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=detect-os,detect,os,info` + +`cm run script --tags=detect-os,detect,os,info ` + +*or* + +`cmr "detect-os detect os info"` + +`cmr "detect-os detect os info " ` + + +#### Run this script from Python + +
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'detect-os,detect,os,info',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="detect-os,detect,os,info"``` + +#### Run this script via Docker (beta) + +`cm docker script "detect-os detect os info" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-os/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-os/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-os/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-os/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-os/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-os/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-os/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-os/_cm.json)*** + * get,sys-utils-min + * Enable this dependency only if all ENV vars are set:
+`{'CM_HOST_OS_TYPE': ['windows']}` + - CM script: [get-sys-utils-min](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-min) + +___ +### Script output +`cmr "detect-os detect os info " -j` +#### New environment keys (filter) + +* `+CM_HOST_OS_*` +* `+PATH` +* `CM_HOST_OS_*` +* `CM_HOST_PLATFORM_*` +* `CM_HOST_PYTHON_*` +* `CM_HOST_SYSTEM_NAME` +* `CM_RUN_STATE_DOCKER` +#### New environment keys auto-detected from customize + +* `CM_HOST_OS_BITS` +* `CM_HOST_OS_MACHINE` +* `CM_HOST_OS_PACKAGE_MANAGER` +* `CM_HOST_OS_PACKAGE_MANAGER_INSTALL_CMD` +* `CM_HOST_OS_PACKAGE_MANAGER_UPDATE_CMD` +* `CM_HOST_OS_TYPE` +* `CM_HOST_PYTHON_BITS` +* `CM_HOST_SYSTEM_NAME` \ No newline at end of file diff --git a/docs/Python-automation/activate-python-venv.md b/docs/Python-automation/activate-python-venv.md new file mode 100644 index 0000000000..f2d9f47bf1 --- /dev/null +++ b/docs/Python-automation/activate-python-venv.md @@ -0,0 +1,121 @@ +Automatically generated README for this automation recipe: **activate-python-venv** + +Category: **Python automation** + +License: **Apache 2.0** + +Developers: [Grigori Fursin](https://cKnowledge.org/gfursin) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=activate-python-venv,fcbbb84946f34c55) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/activate-python-venv)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *activate,python-venv* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "activate python-venv" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=activate,python-venv` + +`cm run script --tags=activate,python-venv ` + +*or* + +`cmr "activate python-venv"` + +`cmr "activate python-venv " ` + + +#### Run this script from Python + +
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'activate,python-venv',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="activate,python-venv"``` + +#### Run this script via Docker (beta) + +`cm docker script "activate python-venv" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/activate-python-venv/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/activate-python-venv/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/activate-python-venv/_cm.json)*** + * install,python-venv + * CM names: `--adr.['python-venv']...` + - CM script: [install-python-venv](https://github.com/mlcommons/cm4mlops/tree/master/script/install-python-venv) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/activate-python-venv/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/activate-python-venv/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/activate-python-venv/_cm.json) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/activate-python-venv/_cm.json) + +___ +### Script output +`cmr "activate python-venv " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Python-automation/get-generic-python-lib.md b/docs/Python-automation/get-generic-python-lib.md new file mode 100644 index 0000000000..ce3f9525ca --- /dev/null +++ b/docs/Python-automation/get-generic-python-lib.md @@ -0,0 +1,681 @@ +Automatically generated README for this automation recipe: **get-generic-python-lib** + +Category: **Python automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-generic-python-lib,94b62a682bc44791) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-python-lib)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,generic-python-lib* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get generic-python-lib" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,generic-python-lib` + +`cm run script --tags=get,generic-python-lib[,variations] [--input_flags]` + +*or* + +`cmr "get generic-python-lib"` + +`cmr "get generic-python-lib [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,generic-python-lib',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,generic-python-lib"``` + +#### Run this script via Docker (beta) + +`cm docker script "get generic-python-lib[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_Pillow` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `Pillow` + - Workflow: + * `_apache-tvm` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `apache-tvm` + - *CM_GENERIC_PYTHON_PIP_EXTRA*: ` --pre` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_typing_extensions + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_apex` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `apex` + - Workflow: + * `_async_timeout` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `async_timeout` + - Workflow: + * `_attr` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `attr` + - Workflow: + * `_attrs` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `attrs` + - Workflow: + * `_boto3` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `boto3` + - Workflow: + * `_cloudpickle` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `cloudpickle` + - Workflow: + * `_cmind` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `cmind` + - Workflow: + * `_colored` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `colored` + - *CM_GENERIC_PYTHON_PIP_EXTRA_INDEX_URL*: `https://pypi.ngc.nvidia.com` + - Workflow: + * `_conda.#` + - Workflow: + * `_cupy` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `cupy` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,cuda + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * `_custom-python` + - Environment variables: + - *CM_TMP_USE_CUSTOM_PYTHON*: `on` + - Workflow: + * `_datasets` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `datasets` + - Workflow: + * `_decorator` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `decorator` + - Workflow: + * `_deepsparse` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `deepsparse` + - Workflow: + * `_dllogger` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `dllogger` + - *CM_GENERIC_PYTHON_PIP_URL*: `git+https://github.com/NVIDIA/dllogger#egg=dllogger` + - Workflow: + * `_fiftyone` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `fiftyone` + - Workflow: + * `_google-api-python-client` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `google_api_python_client` + - Workflow: + * `_google-auth-oauthlib` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `google_auth_oauthlib` + - Workflow: + * `_huggingface_hub` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `huggingface_hub` + - Workflow: + * `_inflect` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `inflect` + - Workflow: + * `_jax` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `jax` + - Workflow: + * `_jax_cuda` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `jax[cuda]` + - *CM_GENERIC_PYTHON_PIP_EXTRA*: `-f https://storage.googleapis.com/jax-releases/jax_cuda_releases.html` + - *CM_JAX_VERSION_EXTRA*: `CUDA` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,cuda + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * `_librosa` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `librosa` + - Workflow: + * `_matplotlib` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `matplotlib` + - Workflow: + * `_mlperf_loadgen` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `mlperf_loadgen` + - *CM_GENERIC_PYTHON_PIP_URL*: `git+https://github.com/mlcommons/inference.git#subdirectory=loadgen` + - Workflow: + * `_mlperf_logging` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `mlperf_logging` + - *CM_GENERIC_PYTHON_PIP_URL*: `git+https://github.com/mlperf/logging.git` + - Workflow: + * `_mpld3` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `mpld3` + - Workflow: + * `_nibabel` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `nibabel` + - Workflow: + * `_numpy` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `numpy` + - Workflow: + * `_nvidia-apex` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `apex` + - *CM_GENERIC_PYTHON_PACKAGE_VARIANT*: `nvidia-apex` + - *CM_GENERIC_PYTHON_PIP_URL*: `git+https://github.com/nvidia/apex@0da3ffb92ee6fbe5336602f0e3989db1cd16f880` + - Workflow: + * `_nvidia-apex-from-src` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `apex` + - *CM_GENERIC_PYTHON_PACKAGE_VARIANT*: `nvidia-apex` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,cuda + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * get,generic-python-lib,_torch_cuda + * CM names: `--adr.['torch']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,git,repo,_repo.https://github.com/NVIDIA/apex,_tag.23.05 + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + * `_nvidia-dali` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `nvidia-dali-cuda120` + - *CM_GENERIC_PYTHON_PIP_EXTRA*: ` --upgrade --default-timeout=900` + - *CM_GENERIC_PYTHON_PIP_EXTRA_INDEX_URL*: `https://developer.download.nvidia.com/compute/redist` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,cuda + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * `_nvidia-pycocotools` + - Environment variables: + - *CM_GENERIC_PYTHON_PIP_UNINSTALL_DEPS*: `pycocotools` + - *CM_GENERIC_PYTHON_PIP_URL*: `pycocotools@git+https://github.com/NVIDIA/cocoapi#subdirectory=PythonAPI` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,generic-python-lib,_package.cython + * CM names: `--adr.['cython']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.numpy + * CM names: `--adr.['numpy']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_nvidia-pyindex` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `nvidia-pyindex` + - Workflow: + * `_nvidia-tensorrt` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `nvidia-tensorrt` + - Workflow: + * `_onnx` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `onnx` + - Workflow: + * `_onnx-graphsurgeon` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `onnx_graphsurgeon` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_package.nvidia-pyindex + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_onnxruntime` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `onnxruntime` + - Workflow: + * `_onnxruntime,rocm` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `onnxruntime-training` + - *CM_GENERIC_PYTHON_PIP_URL*: `https://download.onnxruntime.ai/onnxruntime_training-1.16.0%2Brocm56-cp3<<>>-cp3<<>>-manylinux_2_17_x86_64.manylinux2014_x86_64.whl` + - Workflow: + * `_onnxruntime_gpu` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `onnxruntime_gpu` + - *CM_ONNXRUNTIME_VERSION_EXTRA*: `GPU` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,cuda + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * `_opencv-python` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `opencv-python` + - Workflow: + * `_package.#` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `#` + - *CM_GENERIC_PYTHON_PIP_UNINSTALL_DEPS*: `` + - *CM_GENERIC_PYTHON_PIP_URL*: `` + - Workflow: + * `_pandas` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `pandas` + - Workflow: + * `_path.#` + - Environment variables: + - *CM_GENERIC_PYTHON_PIP_URL*: `#` + - Workflow: + * `_pillow` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `Pillow` + - Workflow: + * `_pip` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `pip` + - Workflow: + * `_polygraphy` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `polygraphy` + - *CM_GENERIC_PYTHON_PIP_EXTRA_INDEX_URL*: `https://pypi.ngc.nvidia.com` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_colored + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_pre` + - Environment variables: + - *CM_GENERIC_PYTHON_DEV_VERSION*: `yes` + - Workflow: + * `_protobuf` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `protobuf` + - Workflow: + * `_psutil` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `psutil` + - Workflow: + * `_pycocotools` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `pycocotools` + - Workflow: + * `_pycuda` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `pycuda` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,cuda + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * `_ray` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `ray[default]` + - Workflow: + * `_requests` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `requests` + - Workflow: + * `_rocm` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,rocm + * CM names: `--adr.['rocm']...` + - CM script: [get-rocm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-rocm) + * `_safetensors` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `safetensors` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,rust-compiler + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_HOST_PLATFORM_FLAVOR': ['x86_64']}` + - CM script: [get-compiler-rust](https://github.com/mlcommons/cm4mlops/tree/master/script/get-compiler-rust) + * `_scikit-learn` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `scikit-learn` + - Workflow: + * `_scipy` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `scipy` + - Workflow: + * `_scons` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `scons` + - Workflow: + * `_setfit` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `setfit` + - Workflow: + * `_setuptools` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `setuptools` + - Workflow: + * `_six` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `six` + - Workflow: + * `_sklearn` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `sklearn` + - Workflow: + * `_sox` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `sox` + - Workflow: + * `_sparsezoo` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `sparsezoo` + - Workflow: + * `_streamlit` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `streamlit` + - Workflow: + * `_streamlit_option_menu` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `streamlit_option_menu` + - Workflow: + * `_tensorboard` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `tensorboard` + - Workflow: + * `_tensorflow` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `tensorflow` + - Workflow: + * `_tensorflow,rocm` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `tensorflow-rocm` + - Workflow: + * `_tensorrt` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `tensorrt` + - *CM_GENERIC_PYTHON_PIP_EXTRA_INDEX_URL*: `https://download.pytorch.org/whl/<<>>` + - *CM_TORCH_VERSION_EXTRA*: `CUDA` + - Workflow: + * `_tflite` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `tflite` + - Workflow: + * 
`_tflite-runtime` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `tflite-runtime` + - Workflow: + * `_tokenization` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `tokenization` + - Workflow: + * `_toml` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `toml` + - Workflow: + * `_torch` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torch` + - *CM_GENERIC_PYTHON_PIP_EXTRA_INDEX_URL*: `https://download.pytorch.org/whl/cpu` + - Workflow: + * `_torch,pre` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torch` + - *CM_GENERIC_PYTHON_PIP_EXTRA*: ` --pre` + - *CM_GENERIC_PYTHON_PIP_INDEX_URL*: `https://download.pytorch.org/whl/nightly/cpu` + - Workflow: + * `_torch,rocm` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torch` + - *CM_GENERIC_PYTHON_PIP_INDEX_URL*: `https://download.pytorch.org/whl/rocm5.6` + - *CM_GENERIC_PYTHON_PIP_UNINSTALL_DEPS*: `torch` + - Workflow: + 1. ***Read "post_deps" on other CM scripts*** + * get,generic-python-lib,_torchvision,_rocm + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torchaudio,_rocm + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_torch_cuda` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torch` + - *CM_GENERIC_PYTHON_PIP_EXTRA_INDEX_URL1*: `https://download.pytorch.org/whl/<<>>` + - *CM_TORCH_VERSION_EXTRA*: `CUDA` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,cuda + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * `_torch_cuda,pre` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torch` + - *CM_GENERIC_PYTHON_PIP_EXTRA*: ` --pre` + - *CM_GENERIC_PYTHON_PIP_INDEX_URL*: `https://download.pytorch.org/whl/<<>>` + - *CM_TORCH_VERSION_EXTRA*: `CUDA` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,cuda + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * get,generic-python-lib,_numpy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_torch_tensorrt` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torch-tensorrt` + - *CM_GENERIC_PYTHON_PIP_EXTRA_INDEX_URL*: `https://download.pytorch.org/whl/<<>>` + - *CM_TORCH_VERSION_EXTRA*: `CUDA` + - Workflow: + * `_torchaudio` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torchaudio` + - *CM_GENERIC_PYTHON_PIP_EXTRA_INDEX_URL*: `https://download.pytorch.org/whl/cpu` + - Workflow: + * `_torchaudio,rocm` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torchaudio` + - *CM_GENERIC_PYTHON_PIP_INDEX_URL*: `https://download.pytorch.org/whl/rocm5.6` + - *CM_GENERIC_PYTHON_PIP_UNINSTALL_DEPS*: `torchaudio` + - Workflow: + * `_torchaudio_cuda` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torchaudio` + - *CM_GENERIC_PYTHON_PIP_EXTRA_INDEX_URL1*: `https://download.pytorch.org/whl/<<>>` + - *CM_TORCHAUDIO_VERSION_EXTRA*: `CUDA` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,cuda + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * `_torchvision` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torchvision` + - *CM_GENERIC_PYTHON_PIP_EXTRA_INDEX_URL*: `https://download.pytorch.org/whl/cpu` + - Workflow: + * `_torchvision,rocm` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torchvision` + - *CM_GENERIC_PYTHON_PIP_INDEX_URL*: `https://download.pytorch.org/whl/rocm5.6` + - *CM_GENERIC_PYTHON_PIP_UNINSTALL_DEPS*: `torchvision` + - Workflow: + * `_torchvision_cuda` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torchvision` + - *CM_TORCHVISION_VERSION_EXTRA*: `CUDA` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,cuda + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * `_tornado` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `tornado` + - Workflow: + * `_tqdm` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `tqdm` + - Workflow: + * `_transformers` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `transformers` + - Workflow: + * `_typing_extensions` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `typing_extensions` + - Workflow: + * `_ujson` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `ujson` + - Workflow: + * `_unidecode` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `unidecode` + - Workflow: + * `_url.#` + - Environment variables: + - *CM_GENERIC_PYTHON_PIP_URL*: `#` + - *CM_TMP_PYTHON_PACKAGE_FORCE_INSTALL*: `yes` + - Workflow: + * `_wandb` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `wandb` + - Workflow: + * `_west` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `west` + - Workflow: + * `_xgboost` + - Environment 
variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `xgboost` + - Workflow: + * `_xlsxwriter` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `xlsxwriter` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--extra_index_url=value` → `CM_GENERIC_PYTHON_PIP_EXTRA_INDEX_URL=value` +* `--force_install=value` → `CM_TMP_PYTHON_PACKAGE_FORCE_INSTALL=value` +* `--index_url=value` → `CM_GENERIC_PYTHON_PIP_INDEX_URL=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "extra_index_url":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-python-lib/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,python3 + * Skip this dependency only if all ENV vars are set:
+`{'CM_TMP_USE_CUSTOM_PYTHON': ['on']}` + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_pip + * Skip this dependency only if all ENV vars are set:
+`{'CM_GENERIC_PYTHON_PACKAGE_NAME': ['pip']}` + * CM names: `--adr.['python-pip', 'pip']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-python-lib/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-python-lib/_cm.json)*** + * install,onnxruntime,from.src,_cuda + * Enable this dependency only if all ENV vars are set:
+`{'CM_INSTALL_ONNXRUNTIME_GPU_FROM_SRC': ['yes']}` + - CM script: [install-onnxruntime-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-onnxruntime-from-src) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-python-lib/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-python-lib/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-python-lib/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-python-lib/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-python-lib/_cm.json) + +___ +### Script output +`cmr "get generic-python-lib [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_PYTHONLIB_*` +#### New environment keys auto-detected from customize diff --git a/docs/Python-automation/get-python3.md b/docs/Python-automation/get-python3.md new file mode 100644 index 0000000000..2a011ed3c1 --- /dev/null +++ b/docs/Python-automation/get-python3.md @@ -0,0 +1,169 @@ +Automatically generated README for this automation recipe: **get-python3** + +Category: **Python automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-python3,d0b5dd74373f4a62) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-python3)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,python,python3,get-python,get-python3* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get python python3 get-python get-python3" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,python,python3,get-python,get-python3` + +`cm run script --tags=get,python,python3,get-python,get-python3[,variations] ` + +*or* + +`cmr "get python python3 get-python get-python3"` + +`cmr "get python python3 get-python get-python3 [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,python,python3,get-python,get-python3', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,python,python3,get-python,get-python3"``` + +#### Run this script via Docker (beta) + +`cm docker script "get python python3 get-python get-python3[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_conda.#` + - Environment variables: + - *CM_PYTHON_CONDA*: `yes` + - *CM_PYTHON_INSTALL_CACHE_TAGS*: `_conda.#` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic,conda-package,_name.#,_package.python + * CM names: `--adr.['conda-package', 'conda-python']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * `_custom-path.#` + - Environment variables: + - *CM_PYTHON_BIN_WITH_PATH*: `#` + - Workflow: + * `_lto` + - Workflow: + * `_optimized` + - Workflow: + * `_shared` + - Workflow: + * `_with-custom-ssl` + - Workflow: + * `_with-ssl` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-python3/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-python3/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-python3/_cm.json)*** + * install,python,src + * Enable this dependency only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + - CM script: [install-python-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-python-src) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-python3/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-python3/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-python3/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-python3/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-python3/_cm.json) + +___ +### Script output +`cmr "get python python3 get-python get-python3 [,variations]" -j` +#### New environment keys (filter) + +* `+C_INCLUDE_PATH` +* `+LD_LIBRARY_PATH` +* `+PATH` +* `CM_PYTHON_*` +#### New environment keys auto-detected from customize + +* `CM_PYTHON_BIN` +* `CM_PYTHON_BIN_PATH` +* `CM_PYTHON_BIN_WITH_PATH` +* `CM_PYTHON_CACHE_TAGS` +* `CM_PYTHON_MAJOR_VERSION` +* `CM_PYTHON_MINOR_VERSION` +* `CM_PYTHON_PATCH_VERSION` \ No newline at end of file diff --git a/docs/Python-automation/install-generic-conda-package.md b/docs/Python-automation/install-generic-conda-package.md new file mode 100644 index 0000000000..6743ef9000 --- /dev/null +++ b/docs/Python-automation/install-generic-conda-package.md @@ -0,0 +1,158 @@ +Automatically generated README for this automation recipe: **install-generic-conda-package** + +Category: **Python automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-generic-conda-package,d9275487f5314195) ]* + +--- +#### Summary + 
+* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-generic-conda-package)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,install,generic,generic-conda-lib,conda-lib,conda-package,generic-conda-package* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get install generic generic-conda-lib conda-lib conda-package generic-conda-package" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,install,generic,generic-conda-lib,conda-lib,conda-package,generic-conda-package` + +`cm run script --tags=get,install,generic,generic-conda-lib,conda-lib,conda-package,generic-conda-package[,variations] ` + +*or* + +`cmr "get install generic generic-conda-lib conda-lib conda-package generic-conda-package"` + +`cmr "get install generic generic-conda-lib conda-lib conda-package generic-conda-package [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,install,generic,generic-conda-lib,conda-lib,conda-package,generic-conda-package', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,install,generic,generic-conda-lib,conda-lib,conda-package,generic-conda-package"``` + +#### Run this script via Docker (beta) + +`cm docker script "get install generic generic-conda-lib conda-lib conda-package generic-conda-package[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_name.#` + - Workflow: + * `_package.#` + - Environment variables: + - *CM_CONDA_PKG_NAME*: `#` + - Workflow: + +
+ + + * Group "**package-source**" +
+ Click here to expand this section. + + * `_source.#` + - Environment variables: + - *CM_CONDA_PKG_SRC*: `#` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-generic-conda-package/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,conda + * CM names: `--adr.['conda']...` + - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) + * get,conda + * CM names: `--adr.['conda']...` + - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-generic-conda-package/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-generic-conda-package/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-generic-conda-package/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-generic-conda-package/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-generic-conda-package/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-generic-conda-package/_cm.json) + +___ +### Script output +`cmr "get install generic generic-conda-lib conda-lib conda-package generic-conda-package [,variations]" -j` +#### New environment keys (filter) + +* `CM_PYTHONLIB_*` +#### New environment keys auto-detected from customize diff --git a/docs/Python-automation/install-python-src.md b/docs/Python-automation/install-python-src.md new file mode 100644 index 0000000000..1fd8e9eae8 --- /dev/null +++ b/docs/Python-automation/install-python-src.md @@ -0,0 +1,182 @@ +Automatically generated README for this automation recipe: **install-python-src** + +Category: **Python automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-python-src,12d3a608afe14a1e) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,src,python,python3,src-python3,src-python* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install src python python3 src-python3 src-python" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,src,python,python3,src-python3,src-python` + +`cm run script --tags=install,src,python,python3,src-python3,src-python[,variations] ` + +*or* + +`cmr "install src python python3 src-python3 src-python"` + +`cmr "install src python python3 src-python3 src-python [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'install,src,python,python3,src-python3,src-python', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,src,python,python3,src-python3,src-python"``` + +#### Run this script via Docker (beta) + +`cm docker script "install src python python3 src-python3 src-python[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_lto` + - Environment variables: + - *CM_PYTHON_LTO_FLAG*: ` --lto` + - *CM_PYTHON_INSTALL_CACHE_TAGS*: `with-lto` + - Workflow: + * `_optimized` + - Environment variables: + - *CM_PYTHON_OPTIMIZATION_FLAG*: ` --enable-optimizations` + - *CM_PYTHON_INSTALL_CACHE_TAGS*: `optimized` + - Workflow: + * `_shared` + - Environment variables: + - *CM_PYTHON_INSTALL_CACHE_TAGS*: `shared` + - *CM_SHARED_BUILD*: `yes` + - Workflow: + * `_with-custom-ssl` + - Environment variables: + - *CM_CUSTOM_SSL*: `yes` + - *CM_PYTHON_INSTALL_CACHE_TAGS*: `with-custom-ssl` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,openssl + - CM script: [get-openssl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-openssl) + * `_with-ssl` + - Environment variables: + - *CM_ENABLE_SSL*: `yes` + - *CM_PYTHON_INSTALL_CACHE_TAGS*: `with-ssl` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_ENABLE_SSL: `no` +* CM_CUSTOM_SSL: `no` +* CM_SHARED_BUILD: `no` +* CM_PYTHON_OPTIMIZATION_FLAG: `` +* CM_PYTHON_LTO_FLAG: `` +* CM_WGET_URL: `https://www.python.org/ftp/python/[PYTHON_VERSION]/Python-[PYTHON_VERSION].tgz` + +
+ +#### Versions +Default version: `3.10.13` + +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-src/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-src/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-src/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-src/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-src/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-src/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-src/_cm.json)*** + * get,python3 + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + +___ +### Script output +`cmr "install src python python3 src-python3 src-python [,variations]" -j` +#### New environment keys (filter) + +* `+C_INCLUDE_PATH` +* `+LD_LIBRARY_PATH` +* `+PATH` +* `CM_PYTHON_BIN_WITH_PATH` +* `CM_PYTHON_INSTALL_PATH` +#### New environment keys auto-detected from customize + +* `CM_PYTHON_BIN_WITH_PATH` \ No newline at end of file diff --git a/docs/Python-automation/install-python-venv.md b/docs/Python-automation/install-python-venv.md new file mode 100644 index 0000000000..8b269d7413 --- /dev/null +++ b/docs/Python-automation/install-python-venv.md @@ -0,0 +1,152 @@ +Automatically generated README for this automation recipe: **install-python-venv** + +Category: **Python automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-python-venv,7633ebada4584c6c) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-venv)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,python,get-python-venv,python-venv* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install python get-python-venv python-venv" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,python,get-python-venv,python-venv` + +`cm run script --tags=install,python,get-python-venv,python-venv[,variations] ` + +*or* + +`cmr "install python get-python-venv python-venv"` + +`cmr "install python get-python-venv python-venv [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'install,python,get-python-venv,python-venv', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,python,get-python-venv,python-venv"``` + +#### Run this script via Docker (beta) + +`cm docker script "install python get-python-venv python-venv[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_lto` + - Workflow: + * `_optimized` + - Workflow: + * `_shared` + - Workflow: + * `_with-custom-ssl` + - Workflow: + * `_with-ssl` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-venv/_cm.json)*** + * get,python,-virtual + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-venv/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-venv/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-venv/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-venv/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-venv/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-venv/customize.py)*** + 1. 
***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-venv/_cm.json)*** + * get,python3 + * CM names: `--adr.['register-python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + +___ +### Script output +`cmr "install python get-python-venv python-venv [,variations]" -j` +#### New environment keys (filter) + +* `CM_PYTHON_BIN_WITH_PATH` +* `CM_VIRTUAL_ENV_*` +#### New environment keys auto-detected from customize + +* `CM_PYTHON_BIN_WITH_PATH` +* `CM_VIRTUAL_ENV_DIR` +* `CM_VIRTUAL_ENV_PATH` +* `CM_VIRTUAL_ENV_SCRIPTS_PATH` \ No newline at end of file diff --git a/docs/Remote-automation/remote-run-commands.md b/docs/Remote-automation/remote-run-commands.md new file mode 100644 index 0000000000..7782b7f6fe --- /dev/null +++ b/docs/Remote-automation/remote-run-commands.md @@ -0,0 +1,145 @@ +Automatically generated README for this automation recipe: **remote-run-commands** + +Category: **Remote automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=remote-run-commands,b71e24b03c9d49cd) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/remote-run-commands)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *remote,run,cmds,remote-run,remote-run-cmds,ssh-run,ssh* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "remote run cmds remote-run remote-run-cmds ssh-run ssh" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=remote,run,cmds,remote-run,remote-run-cmds,ssh-run,ssh` + +`cm run script --tags=remote,run,cmds,remote-run,remote-run-cmds,ssh-run,ssh [--input_flags]` + +*or* + +`cmr "remote run cmds remote-run remote-run-cmds ssh-run ssh"` + +`cmr "remote run cmds remote-run remote-run-cmds ssh-run ssh " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'remote,run,cmds,remote-run,remote-run-cmds,ssh-run,ssh', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="remote,run,cmds,remote-run,remote-run-cmds,ssh-run,ssh"``` + +#### Run this script via Docker (beta) + +`cm docker script "remote run cmds remote-run remote-run-cmds ssh-run ssh" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--client_refresh=value` → `CM_SSH_CLIENT_REFRESH=value` +* `--host=value` → `CM_SSH_HOST=value` +* `--password=value` → `CM_SSH_PASSWORD=value` +* `--port=value` → `CM_SSH_PORT=value` +* `--run_cmds=value` → `CM_SSH_RUN_COMMANDS=value` +* `--skip_host_verify=value` → `CM_SSH_SKIP_HOST_VERIFY=value` +* `--ssh_key_file=value` → `CM_SSH_KEY_FILE=value` +* `--user=value` → `CM_SSH_USER=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "client_refresh":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_SSH_PORT: `22` +* CM_SSH_HOST: `localhost` +* CM_SSH_USER: `$USER` +* CM_SSH_CLIENT_REFRESH: `10` +* CM_SSH_KEY_FILE: `$HOME/.ssh/id_rsa` + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/remote-run-commands/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/remote-run-commands/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/remote-run-commands/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/remote-run-commands/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/remote-run-commands/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/remote-run-commands/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/remote-run-commands/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/remote-run-commands/_cm.json) + +___ +### Script output +`cmr "remote run cmds remote-run remote-run-cmds ssh-run ssh " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Reproduce-MLPerf-benchmarks/app-mlperf-inference-nvidia.md b/docs/Reproduce-MLPerf-benchmarks/app-mlperf-inference-nvidia.md new file mode 100644 index 0000000000..c7f83ff098 --- /dev/null +++ b/docs/Reproduce-MLPerf-benchmarks/app-mlperf-inference-nvidia.md @@ -0,0 +1,1333 @@ +Automatically generated README for this automation recipe: **app-mlperf-inference-nvidia** + +Category: **Reproduce MLPerf benchmarks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM
script](https://access.cknowledge.org/playground/?action=scripts&name=app-mlperf-inference-nvidia,bc3b17fb430f4732) ]* + +--- + +This script is a CM wrapper to the official [Nvidia submission code](https://github.com/mlcommons/inference_results_v3.0/tree/master/closed/NVIDIA) used for MLPerf inference submissions. + + + +## Download the needed files + +* Please ask privately in [this discord channel](https://discord.gg/y7hupJsUNb) if you would like to get access to an Amazon S3 bucket containing all the needed files for convenience. Otherwise, you can download them from the below links. + +For x86 machines, please download the latest install tar files from the below sites +1. [cuDNN](https://developer.nvidia.com/cudnn) (for cuda 11) +2. [TensorRT](https://developer.nvidia.com/tensorrt) +3. Imagenet validation set (unfortunately not available via public URL) following the instructions given [here](https://github.com/mlcommons/ck/blob/master/cm-mlops/script/get-dataset-imagenet-val/README-extra.md) + +
+ + + +## Using Docker (Recommended on x86 systems) + + +Assuming all the downloaded files are to the user home directory please do the following steps: + +1. Download CUDA 11.8 + ``` + wget https://developer.download.nvidia.com/compute/cuda/11.8.0/local_installers/cuda_11.8.0_520.61.05_linux.run + ``` +2. [Install docker](https://docs.docker.com/engine/install/) and [Nvidia container toolkit](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html) + +3. Give docker permission to the current user + ``` + sudo usermod -aG docker $USER + ``` + Logout and login + Restart docker if required and confirm that Nvidia container toolkit is working by + ``` + nvidia-ctk --version + ``` +4. Check if Nvidia driver is working properly on the host. + ``` + nvidia-smi + ``` + If the above command produces any error you'll need to install Nvidia drivers on the host. You can do this via CM if you have sudo access + ``` + cmr "install cuda prebuilt _driver" --version=11.8.0 + ``` +5. Build the docker container and mount the paths from the host machine. 
+ ** You may want to change the `scratch_path` location as it can take 100s of GBs.** + ```bash + cm docker script --tags=build,nvidia,inference,server \ + --cuda_run_file_path=$HOME/cuda_11.8.0_520.61.05_linux.run \ + --tensorrt_tar_file_path=$HOME/TensorRT-8.6.1.6.Linux.x86_64-gnu.cuda-11.8.tar.gz \ + --cudnn_tar_file_path=$HOME/cudnn-linux-x86_64-8.9.2.26_cuda11-archive.tar.xz \ + --imagenet_path=$HOME/imagenet-2012-val \ + --scratch_path=$HOME/mlperf_scratch \ + --docker_cm_repo=mlcommons@cm4mlops \ + --results_dir=$HOME/results_dir \ + --submission_dir=$HOME/submission_dir \ + --adr.compiler.tags=gcc + ``` + * Use `--docker_cache=no` to turn off docker caching + * Use `--docker_run_cmd_prefix="cm pull repo mlcommons@cm4mlops"` to update the CK repository when docker caching is used + * Use `--custom_system=no` if you are using a similar system to the [Nvidia submission systems for MLPerf inference 3.0](https://github.com/mlcommons/inference_results_v3.0/tree/main/closed/NVIDIA/systems). + +6. At the end of the build you'll get the following prompt unless you have chosen `--custom_system=no`. Please give a system name and say yes to generating the configuration files + ### Example output + ``` + ============================================ + => A system ID is a string containing only letters, numbers, and underscores + => that is used as the human-readable name of the system. It is also used as + => the system name when creating the measurements/ and results/ entries. + => This string should also start with a letter to be a valid Python enum member name. + => Specify the system ID to use for the current system: phoenix + => Reloaded system list. MATCHED_SYSTEM: KnownSystem.phoenix + => This script will generate Benchmark Configuration stubs for the detected system. + Continue? [y/n]: y + ``` + Now you'll be inside the CM Nvidia docker container and can run further scripts. + +7. 
Once the build is complete, you can proceed with any further CM scripts like for MLPerf inference. You can also save the container at this stage using [docker commit](https://docs.docker.com/engine/reference/commandline/commit/) so that it can be launched later without having to go through the previous steps. + +
+ +
+ + + +## Without Docker + + +1. Install CUDA + If CUDA is not detected, CM should download and install it automatically when you run the workflow. + ** Nvidia drivers are expected to be installed on the system ** + +2. Install cuDNN + ```bash + cmr "get cudnn" --tar_file= + ``` +3. Install TensorRT + ```bash + cmr "get tensorrt _dev" --tar_file= + ``` + On non x86 systems like Nvidia Orin, you can do a package manager install and then CM should pick up the installation automatically during the workflow run. + +4. Build the Nvidia inference server + ``` + cmr "build nvidia inference server" \ + --adr.install-cuda-prebuilt.local_run_file_path=/data/cuda_11.8.0_520.61.05_linux.run \ + --adr.tensorrt.tar_file=/data/TensorRT-8.6.1.6.Linux.x86_64-gnu.cuda-11.8.tar.gz \ + --adr.cudnn.tar_file=/data/cudnn-linux-x86_64-8.9.2.26_cuda11-archive.tar.xz \ + --adr.compiler.tags=gcc \ + [--custom_system=no] + ``` + Use `--custom_system=no` if you are using a similar system to the [Nvidia submission systems for MLPerf inference 3.0](https://github.com/mlcommons/inference_results_v3.0/tree/main/closed/NVIDIA/systems). + +5. At the end of the build you'll get the following prompt unless you have chosen `--custom_system=no`. Please give a system name and say yes to generating the configuration files + + ### Example output + ``` + ============================================ + => A system ID is a string containing only letters, numbers, and underscores + => that is used as the human-readable name of the system. It is also used as + => the system name when creating the measurements/ and results/ entries. + => This string should also start with a letter to be a valid Python enum member name. + => Specify the system ID to use for the current system: phoenix + => Reloaded system list. MATCHED_SYSTEM: KnownSystem.phoenix + => This script will generate Benchmark Configuration stubs for the detected system. + Continue? [y/n]: y + ``` +
+ + +## Acknowledgments + +* A common CM interface and automation for MLPerf inference benchmarks was developed by Arjun Suresh and Grigori Fursin + sponsored by the [cTuning foundation](https://cTuning.org) and [cKnowledge.org](https://cKnowledge.org). +* Nvidia's MLPerf inference implementation was developed by Zhihan Jiang, Ethan Cheng, Yiheng Zhang and Jinho Suh. + + + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-nvidia)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *reproduce,mlcommons,mlperf,inference,harness,nvidia-harness,nvidia* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "reproduce mlcommons mlperf inference harness nvidia-harness nvidia" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=reproduce,mlcommons,mlperf,inference,harness,nvidia-harness,nvidia` + +`cm run script --tags=reproduce,mlcommons,mlperf,inference,harness,nvidia-harness,nvidia[,variations] [--input_flags]` + +*or* + +`cmr "reproduce mlcommons mlperf inference harness nvidia-harness nvidia"` + +`cmr "reproduce mlcommons mlperf inference harness nvidia-harness nvidia [variations]" [--input_flags]` + + +* *See the list of 
`variations` [here](#variations) and check the [Getting Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'reproduce,mlcommons,mlperf,inference,harness,nvidia-harness,nvidia', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="reproduce,mlcommons,mlperf,inference,harness,nvidia-harness,nvidia"``` + +#### Run this script via Docker (beta) + +`cm docker script "reproduce mlcommons mlperf inference harness nvidia-harness nvidia[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *Internal group (variations should not be selected manually)* +
+ Click here to expand this section. + + * `_3d-unet_` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_transformers + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.nibabel + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_pandas + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_bert_` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_transformers + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_safetensors + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_onnx + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_dlrm_` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,generic-python-lib,_torch + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.torchsnapshot + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.torchrec + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.fbgemm-gpu + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_onnx-graphsurgeon + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.scikit-learn + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_gptj_` + - Environment variables: + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://cloud.mlcommons.org/index.php/s/QAZ2oM94MkFtbQx/download` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_package.datasets + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.simplejson + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + +
+ + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_a100,sxm,3d-unet_,offline,run_harness` + - Workflow: + * `_a100,sxm,bert_,offline,run_harness` + - Workflow: + * `_a100,sxm,dlrm_,offline,run_harness` + - Workflow: + * `_a100,sxm,resnet50,offline,run_harness` + - Environment variables: + - *CM_MLPERF_PERFORMANCE_SAMPLE_COUNT*: `2048` + - Workflow: + * `_a100,sxm,retinanet,offline,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `2` + - *CM_MLPERF_NVIDIA_HARNESS_GPU_INFERENCE_STREAMS*: `2` + - *CM_MLPERF_NVIDIA_HARNESS_WORKSPACE_SIZE*: `300000000000` + - Workflow: + * `_a100,sxm,rnnt,offline,run_harness` + - Workflow: + * `_gptj_,build` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * install,pytorch,from.src,_for-nvidia-mlperf-inference-v3.1 + - CM script: [install-pytorch-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pytorch-from-src) + * get,cmake + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + * `_gptj_,build_engine` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * install,pytorch,from.src,_for-nvidia-mlperf-inference-v3.1 + - CM script: [install-pytorch-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pytorch-from-src) + * get,cmake + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + * `_gptj_,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_USE_FP8*: `True` + - *CM_MLPERF_NVIDIA_HARNESS_ENABLE_SORT*: `True` + - *CM_MLPERF_NVIDIA_HARNESS_NUM_SORT_SEGMENTS*: `2` + - *CM_MLPERF_NVIDIA_HARNESS_SKIP_POSTPROCESS*: `True` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * install,pytorch,from.src,_for-nvidia-mlperf-inference-v3.1 + - CM script: [install-pytorch-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pytorch-from-src) + * get,cmake + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + * `_gpu_memory.16,3d-unet_,offline,run_harness` + - Workflow: + * `_gpu_memory.16,bert_,offline,run_harness` + - Workflow: + * `_gpu_memory.16,dlrm_,offline,run_harness` + - Workflow: + * `_gpu_memory.16,gptj_,offline,run_harness` + - Workflow: + * `_gpu_memory.16,resnet50,offline,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `4` + - Workflow: + * `_gpu_memory.16,retinanet,offline,run_harness` + - Workflow: + * `_gpu_memory.16,rnnt,offline,run_harness` + - Workflow: + * `_gpu_memory.24,3d-unet_,offline,run_harness` + - Workflow: + * `_gpu_memory.24,bert_,offline,run_harness` + - Workflow: + * `_gpu_memory.24,dlrm_,offline,run_harness` + - Workflow: + * `_gpu_memory.24,gptj_,offline,run_harness` + - Workflow: + * `_gpu_memory.24,resnet50,offline,run_harness` + - Workflow: + * `_gpu_memory.24,retinanet,offline,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `2` + - *CM_MLPERF_NVIDIA_HARNESS_GPU_INFERENCE_STREAMS*: `2` + - Workflow: + * `_gpu_memory.24,rnnt,offline,run_harness` + - Workflow: + * `_gpu_memory.32,3d-unet_,offline,run_harness` + - Workflow: + * `_gpu_memory.32,bert_,offline,run_harness` + - Workflow: + * `_gpu_memory.32,dlrm_,offline,run_harness` + - Workflow: + * `_gpu_memory.32,gptj_,offline,run_harness` + - Workflow: + * `_gpu_memory.32,resnet50,offline,run_harness` + - Workflow: + * `_gpu_memory.32,retinanet,offline,run_harness` + - Workflow: + * `_gpu_memory.32,rnnt,offline,run_harness` + - Workflow: + * `_gpu_memory.40,3d-unet_,offline,run_harness` + - Workflow: + * `_gpu_memory.40,bert_,offline,run_harness` + - Workflow: + * 
`_gpu_memory.40,dlrm_,offline,run_harness` + - Workflow: + * `_gpu_memory.40,gptj_,offline,run_harness` + - Workflow: + * `_gpu_memory.40,resnet50,offline,run_harness` + - Workflow: + * `_gpu_memory.40,retinanet,offline,run_harness` + - Workflow: + * `_gpu_memory.40,rnnt,offline,run_harness` + - Workflow: + * `_gpu_memory.48,3d-unet_,offline,run_harness` + - Workflow: + * `_gpu_memory.48,bert_,offline,run_harness` + - Workflow: + * `_gpu_memory.48,dlrm_,offline,run_harness` + - Workflow: + * `_gpu_memory.48,gptj_,offline,run_harness` + - Workflow: + * `_gpu_memory.48,resnet50,offline,run_harness` + - Workflow: + * `_gpu_memory.48,retinanet,offline,run_harness` + - Workflow: + * `_gpu_memory.48,rnnt,offline,run_harness` + - Workflow: + * `_gpu_memory.80,3d-unet_,offline,run_harness` + - Workflow: + * `_gpu_memory.80,bert_,server,run_harness` + - Workflow: + * `_gpu_memory.80,dlrm_,offline,run_harness` + - Workflow: + * `_gpu_memory.80,gptj_,offline,run_harness` + - Workflow: + * `_gpu_memory.80,resnet50,offline,run_harness` + - Workflow: + * `_gpu_memory.80,retinanet,offline,run_harness` + - Workflow: + * `_gpu_memory.80,rnnt,offline,run_harness` + - Workflow: + * `_l4,3d-unet_,offline,run_harness` + - Workflow: + * `_l4,bert_,offline,run_harness` + - Workflow: + * `_l4,bert_,server,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_GRAPHS_MAX_SEQLEN*: `200` + - *CM_MLPERF_NVIDIA_HARNESS_SERVER_NUM_ISSUE_QUERY_THREADS*: `1` + - *CM_MLPERF_NVIDIA_HARNESS_SOFT_DROP*: `1.0` + - *CM_MLPERF_NVIDIA_HARNESS_USE_SMALL_TILE_GEMM_PLUGIN*: `True` + - Workflow: + * `_l4,dlrm_,offline,run_harness` + - Workflow: + * `_l4,resnet50` + - Workflow: + * `_l4,resnet50,offline,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `2` + - *CM_MLPERF_NVIDIA_HARNESS_GPU_INFERENCE_STREAMS*: `1` + - *CM_MLPERF_NVIDIA_HARNESS_USE_GRAPHS*: `True` + - Workflow: + * `_l4,resnet50,server,run_harness` + - Environment variables: + - 
*CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `9` + - *CM_MLPERF_NVIDIA_HARNESS_GPU_INFERENCE_STREAMS*: `2` + - *CM_MLPERF_NVIDIA_HARNESS_USE_GRAPHS*: `True` + - *CM_MLPERF_NVIDIA_HARNESS_USE_DEQUE_LIMIT*: `True` + - *CM_MLPERF_NVIDIA_HARNESS_DEQUE_TIMEOUT_USEC*: `2000` + - *CM_MLPERF_NVIDIA_HARNESS_USE_CUDA_THREAD_PER_DEVICE*: `True` + - Workflow: + * `_l4,retinanet,offline,run_harness` + - Workflow: + * `_l4,retinanet,server,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_GPU_INFERENCE_STREAMS*: `2` + - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `2` + - *CM_MLPERF_NVIDIA_HARNESS_USE_DEQUE_LIMIT*: `True` + - *CM_MLPERF_NVIDIA_HARNESS_DEQUE_TIMEOUT_USEC*: `30000` + - *CM_MLPERF_NVIDIA_HARNESS_WORKSPACE_SIZE*: `20000000000` + - Workflow: + * `_l4,rnnt,offline,run_harness` + - Workflow: + * `_l4,rnnt,server,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_AUDIO_BATCH_SIZE*: `64` + - *CM_MLPERF_NVIDIA_HARNESS_AUDIO_BUFFER_NUM_LINES*: `1024` + - *CM_MLPERF_NVIDIA_HARNESS_NUM_WARMUPS*: `1024` + - Workflow: + * `_multistream,resnet50` + - Environment variables: + - *SKIP_POLICIES*: `1` + - Workflow: + * `_orin,rnnt,singlestream,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_NUM_WARMUPS*: `1` + - Workflow: + * `_resnet50,multistream,run_harness,num-gpus.1` + - Workflow: + * `_resnet50,multistream,run_harness,num-gpus.2` + - Workflow: + * `_resnet50,server,run_harness` + - Workflow: + * `_retinanet,multistream,run_harness` + - Workflow: + * `_retinanet,server,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `2` + - *CM_MLPERF_NVIDIA_HARNESS_GPU_INFERENCE_STREAMS*: `2` + - Workflow: + * `_rtx_4090,3d-unet_,offline,run_harness` + - Workflow: + * `_rtx_4090,3d-unet_,server,run_harness` + - Workflow: + * `_rtx_4090,bert_,offline,run_harness` + - Workflow: + * `_rtx_4090,bert_,server,run_harness` + - Workflow: + * `_rtx_4090,dlrm_,offline,run_harness` + - Environment 
variables: + - *CM_MLPERF_NVIDIA_HARNESS_EMBEDDING_WEIGHTS_ON_GPU_PART*: `0.30` + - Workflow: + * `_rtx_4090,gptj_,offline,run_harness` + - Workflow: + * `_rtx_4090,gptj_,server,run_harness` + - Workflow: + * `_rtx_4090,resnet50,offline,run_harness` + - Workflow: + * `_rtx_4090,resnet50,server,run_harness` + - Workflow: + * `_rtx_4090,retinanet,offline,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `2` + - *CM_MLPERF_NVIDIA_HARNESS_GPU_INFERENCE_STREAMS*: `2` + - Workflow: + * `_rtx_4090,retinanet,server,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `2` + - *CM_MLPERF_NVIDIA_HARNESS_GPU_INFERENCE_STREAMS*: `2` + - Workflow: + * `_rtx_4090,rnnt,offline,run_harness` + - Workflow: + * `_rtx_4090,rnnt,server,run_harness` + - Workflow: + * `_rtx_6000_ada,3d-unet_,offline,run_harness` + - Workflow: + * `_rtx_6000_ada,3d-unet_,server,run_harness` + - Workflow: + * `_rtx_6000_ada,bert_,offline,run_harness` + - Workflow: + * `_rtx_6000_ada,bert_,server,run_harness` + - Workflow: + * `_rtx_6000_ada,dlrm_,offline,run_harness` + - Workflow: + * `_rtx_6000_ada,resnet50,offline,run_harness` + - Workflow: + * `_rtx_6000_ada,resnet50,server,run_harness` + - Workflow: + * `_rtx_6000_ada,retinanet,offline,run_harness` + - Workflow: + * `_rtx_6000_ada,retinanet,server,run_harness` + - Workflow: + * `_rtx_6000_ada,rnnt,offline,run_harness` + - Workflow: + * `_rtx_6000_ada,rnnt,server,run_harness` + - Workflow: + * `_rtx_a6000,3d-unet_,offline,run_harness` + - Workflow: + * `_rtx_a6000,3d-unet_,server,run_harness` + - Workflow: + * `_rtx_a6000,bert_,offline,run_harness` + - Workflow: + * `_rtx_a6000,bert_,server,run_harness` + - Workflow: + * `_rtx_a6000,dlrm_,offline,run_harness` + - Workflow: + * `_rtx_a6000,resnet50,offline,run_harness` + - Workflow: + * `_rtx_a6000,resnet50,server,run_harness` + - Workflow: + * `_rtx_a6000,retinanet,offline,run_harness` + - Workflow: + * 
`_rtx_a6000,retinanet,server,run_harness` + - Workflow: + * `_rtx_a6000,rnnt,offline,run_harness` + - Workflow: + * `_rtx_a6000,rnnt,server,run_harness` + - Workflow: + * `_run-harness` + - Workflow: + * `_singlestream,resnet50` + - Environment variables: + - *SKIP_POLICIES*: `1` + - Workflow: + * `_singlestream,run_harness` + - Workflow: + * `_t4,3d-unet_,offline,run_harness` + - Workflow: + * `_t4,bert_,offline,run_harness` + - Workflow: + * `_t4,bert_,server,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_GRAPHS_MAX_SEQLEN*: `240` + - *CM_MLPERF_NVIDIA_HARNESS_SERVER_NUM_ISSUE_QUERY_THREADS*: `0` + - *CM_MLPERF_NVIDIA_HARNESS_USE_SMALL_TILE_GEMM_PLUGIN*: `no` + - Workflow: + * `_t4,dlrm_,offline,run_harness` + - Workflow: + * `_t4,resnet50` + - Workflow: + * `_t4,resnet50,offline,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `4` + - Workflow: + * `_t4,resnet50,server,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_GPU_INFERENCE_STREAMS*: `2` + - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `4` + - *CM_MLPERF_NVIDIA_HARNESS_USE_DEQUE_LIMIT*: `True` + - *CM_MLPERF_NVIDIA_HARNESS_DEQUE_TIMEOUT_USEC*: `2000` + - *CM_MLPERF_NVIDIA_HARNESS_SOFT_DROP*: `0.993` + - Workflow: + * `_t4,retinanet,offline,run_harness` + - Workflow: + * `_t4,retinanet,server,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_GPU_INFERENCE_STREAMS*: `2` + - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `2` + - *CM_MLPERF_NVIDIA_HARNESS_USE_DEQUE_LIMIT*: `True` + - *CM_MLPERF_NVIDIA_HARNESS_DEQUE_TIMEOUT_USEC*: `20000` + - *CM_MLPERF_NVIDIA_HARNESS_WORKSPACE_SIZE*: `20000000000` + - Workflow: + * `_t4,rnnt,offline,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `4` + - *CM_MLPERF_NVIDIA_HARNESS_USE_GRAPHS*: `True` + - *CM_MLPERF_NVIDIA_HARNESS_AUDIO_BATCH_SIZE*: `128` + - *CM_MLPERF_NVIDIA_HARNESS_DISABLE_ENCODER_PLUGIN*: `True` + - Workflow: + * 
`_t4,rnnt,server,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `4` + - *CM_MLPERF_NVIDIA_HARNESS_USE_GRAPHS*: `True` + - *CM_MLPERF_NVIDIA_HARNESS_AUDIO_BATCH_SIZE*: `128` + - *CM_MLPERF_NVIDIA_HARNESS_DISABLE_ENCODER_PLUGIN*: `True` + - Workflow: + +
+ + + * Group "**backend**" +
+ Click here to expand this section. + + * **`_tensorrt`** (default) + - Environment variables: + - *CM_MLPERF_BACKEND*: `tensorrt` + - *CM_MLPERF_BACKEND_NAME*: `TensorRT` + - Workflow: + +
+ + + * Group "**batch-size**" +
+ Click here to expand this section. + + * `_batch_size.#` + - Environment variables: + - *CM_MODEL_BATCH_SIZE*: `#` + - *CM_MLPERF_NVIDIA_HARNESS_GPU_BATCH_SIZE*: `#` + - Workflow: + +
+ + + * Group "**build-engine-options**" +
+ Click here to expand this section. + + * `_build_engine_options.#` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_EXTRA_BUILD_ENGINE_OPTIONS*: `#` + - Workflow: + +
+ + + * Group "**device**" +
+ Click here to expand this section. + + * `_cpu` + - Environment variables: + - *CM_MLPERF_DEVICE*: `cpu` + - Workflow: + * **`_cuda`** (default) + - Environment variables: + - *CM_MLPERF_DEVICE*: `gpu` + - *CM_MLPERF_DEVICE_LIB_NAMESPEC*: `cudart` + - Workflow: + +
+ + + * Group "**device-memory**" +
+ Click here to expand this section. + + * `_gpu_memory.16` + - Environment variables: + - *CM_NVIDIA_GPU_MEMORY*: `16` + - Workflow: + * `_gpu_memory.24` + - Environment variables: + - *CM_NVIDIA_GPU_MEMORY*: `24` + - Workflow: + * `_gpu_memory.32` + - Environment variables: + - *CM_NVIDIA_GPU_MEMORY*: `32` + - Workflow: + * `_gpu_memory.40` + - Environment variables: + - *CM_NVIDIA_GPU_MEMORY*: `40` + - Workflow: + * `_gpu_memory.48` + - Environment variables: + - *CM_NVIDIA_GPU_MEMORY*: `48` + - Workflow: + * `_gpu_memory.8` + - Environment variables: + - *CM_NVIDIA_GPU_MEMORY*: `8` + - Workflow: + * `_gpu_memory.80` + - Environment variables: + - *CM_NVIDIA_GPU_MEMORY*: `80` + - Workflow: + +
+ + + * Group "**dla-batch-size**" +
+ Click here to expand this section. + + * `_dla_batch_size.#` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_DLA_BATCH_SIZE*: `#` + - *CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX2*: `dla_batch_size.#` + - Workflow: + +
+ + + * Group "**gpu-connection**" +
+ Click here to expand this section. + + * `_pcie` + - Workflow: + * `_sxm` + - Workflow: + +
+ + + * Group "**gpu-name**" +
+ Click here to expand this section. + + * `_a100` + - Environment variables: + - *CM_NVIDIA_CUSTOM_GPU*: `yes` + - Workflow: + * `_a6000` + - Environment variables: + - *CM_NVIDIA_CUSTOM_GPU*: `yes` + - Workflow: + * `_custom` + - Environment variables: + - *CM_NVIDIA_CUSTOM_GPU*: `yes` + - *CM_MODEL_BATCH_SIZE*: `` + - *CM_MLPERF_NVIDIA_HARNESS_GPU_BATCH_SIZE*: `<<>>` + - Workflow: + * `_l4` + - Environment variables: + - *CM_NVIDIA_CUSTOM_GPU*: `yes` + - Workflow: + * `_orin` + - Environment variables: + - *CM_NVIDIA_CUSTOM_GPU*: `yes` + - *CM_MODEL_BATCH_SIZE*: `` + - *CM_MLPERF_NVIDIA_HARNESS_GPU_BATCH_SIZE*: `<<>>` + - Workflow: + * `_rtx_4090` + - Environment variables: + - *CM_NVIDIA_CUSTOM_GPU*: `yes` + - Workflow: + * `_rtx_6000_ada` + - Environment variables: + - *CM_NVIDIA_CUSTOM_GPU*: `yes` + - Workflow: + * `_t4` + - Environment variables: + - *CM_NVIDIA_CUSTOM_GPU*: `yes` + - Workflow: + +
+ + + * Group "**loadgen-scenario**" +
+ Click here to expand this section. + + * `_multistream` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `MultiStream` + - Workflow: + * `_offline` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `Offline` + - Workflow: + * `_server` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `Server` + - Workflow: + * `_singlestream` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `SingleStream` + - *CUDA_VISIBLE_DEVICES_NOT_USED*: `0` + - Workflow: + +
+ + + * Group "**model**" +
+ Click here to expand this section. + + * `_3d-unet-99` + - Environment variables: + - *CM_MODEL*: `3d-unet-99` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/5597155/files/3dunet_kits19_128x128x128.onnx` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, affine fusion` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int8` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` + - Workflow: + * `_3d-unet-99.9` + - Environment variables: + - *CM_MODEL*: `3d-unet-99.9` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/5597155/files/3dunet_kits19_128x128x128.onnx` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, affine fusion` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int8` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` + - Workflow: + * `_bert-99` + - Environment variables: + - *CM_MODEL*: `bert-99` + - *CM_NOT_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/3750364/files/bert_large_v1_1_fake_quant.onnx` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, affine fusion` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int32` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` + - Workflow: + * `_bert-99.9` + - Environment variables: + - *CM_MODEL*: `bert-99.9` + - *CM_NOT_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/3733910/files/model.onnx` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, affine fusion` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int32` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp16` + - Workflow: + * `_dlrm-v2-99` + - Environment variables: + - *CM_MODEL*: `dlrm-v2-99` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `affine fusion` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp16` + - Workflow: + * `_dlrm-v2-99.9` + - Environment variables: + - *CM_MODEL*: `dlrm-v2-99.9` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `affine fusion` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp16` + - Workflow: + * `_gptj-99` + - Environment variables: + 
- *CM_MODEL*: `gptj-99` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, affine fusion` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int32` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp16` + - Workflow: + * `_gptj-99.9` + - Environment variables: + - *CM_MODEL*: `gptj-99.9` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, affine fusion` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int32` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp16` + - Workflow: + * **`_resnet50`** (default) + - Environment variables: + - *CM_MODEL*: `resnet50` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, affine fusion` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int8` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_onnx-graphsurgeon + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.onnx + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_retinanet` + - Environment variables: + - *CM_MODEL*: `retinanet` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/6617981/files/resnext50_32x4d_fpn.pth` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, affine fusion` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int8` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,generic-python-lib,_Pillow + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torch + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torchvision + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_opencv-python + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_numpy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_pycocotools + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_onnx-graphsurgeon + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.onnx + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_rnnt` + - Environment variables: + - *CM_MODEL*: `rnnt` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/3662521/files/DistributedDataParallel_1576581068.9962234-epoch-100.pt` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, affine fusion` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp16` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp16` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,generic-python-lib,_toml + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torchvision + * CM names: `--adr.['torchvision']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torch + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_nvidia-apex + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_unidecode + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_inflect + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_librosa + * CM names: `--adr.['librosa']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_sox + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-sys-util,_sox + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + +
+ + + * Group "**num-gpus**" +
+ Click here to expand this section. + + * `_num-gpus.#` + - Environment variables: + - *CM_NVIDIA_NUM_GPUS*: `#` + - Workflow: + * **`_num-gpus.1`** (default) + - Environment variables: + - *CM_NVIDIA_NUM_GPUS*: `1` + - Workflow: + +
+ + + * Group "**power-mode**" +
+ Click here to expand this section. + + * `_maxn` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_MAXN*: `True` + - Workflow: + * `_maxq` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_MAXQ*: `True` + - Workflow: + +
+ + + * Group "**run-mode**" +
+ Click here to expand this section. + + * `_build` + - Environment variables: + - *MLPERF_NVIDIA_RUN_COMMAND*: `build` + - *CM_MLPERF_NVIDIA_HARNESS_RUN_MODE*: `build` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,cmake + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + * get,generic,sys-util,_glog-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_gflags-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_libgmock-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_libre2-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_libnuma-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_libboost-all-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_rapidjson-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,cuda,_cudnn + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * get,tensorrt + * CM names: `--adr.['tensorrt']...` + - CM script: [get-tensorrt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tensorrt) + * build,nvidia,inference,server + * CM names: `--adr.['nvidia-inference-server']...` + - CM script: [build-mlperf-inference-server-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/build-mlperf-inference-server-nvidia) + * `_build_engine` + - Aliases: 
`_build-engine` + - Environment variables: + - *MLPERF_NVIDIA_RUN_COMMAND*: `generate_engines` + - *CM_MLPERF_NVIDIA_HARNESS_RUN_MODE*: `generate_engines` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,cuda,_cudnn + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * get,tensorrt + * CM names: `--adr.['tensorrt']...` + - CM script: [get-tensorrt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tensorrt) + * build,nvidia,inference,server + * CM names: `--adr.['nvidia-inference-server']...` + - CM script: [build-mlperf-inference-server-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/build-mlperf-inference-server-nvidia) + * reproduce,mlperf,inference,nvidia,harness,_preprocess_data + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_MODEL': ['dlrm-v2-99', 'dlrm-v2-99.9']}` + - CM script: [app-mlperf-inference-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-nvidia) + * reproduce,mlperf,inference,nvidia,harness,_download_model + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet_old', 'resnet50', 'bert-99', 'bert-99.9', 'dlrm-v2-99', 'dlrm-v2-99.9']}` + - CM script: [app-mlperf-inference-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-nvidia) + * reproduce,mlperf,inference,nvidia,harness,_calibrate + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet']}` + - CM script: [app-mlperf-inference-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-nvidia) + * `_calibrate` + - Environment variables: + - *MLPERF_NVIDIA_RUN_COMMAND*: `calibrate` + - *CM_MLPERF_NVIDIA_HARNESS_RUN_MODE*: `calibrate` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * reproduce,mlperf,inference,nvidia,harness,_download_model + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet_old', 'resnet50', 'bert-99', 'bert-99.9', 'dlrm-v2-99', 'dlrm-v2-99.9']}` + - CM script: [app-mlperf-inference-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-nvidia) + * `_download_model` + - Environment variables: + - *MLPERF_NVIDIA_RUN_COMMAND*: `download_model` + - *CM_MLPERF_NVIDIA_HARNESS_RUN_MODE*: `download_model` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_torch_cuda + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet']}` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_prebuild` + - Environment variables: + - *MLPERF_NVIDIA_RUN_COMMAND*: `prebuild` + - *CM_MLPERF_NVIDIA_HARNESS_RUN_MODE*: `prebuild` + - Workflow: + * `_preprocess_data` + - Environment variables: + - *MLPERF_NVIDIA_RUN_COMMAND*: `preprocess_data` + - *CM_MLPERF_NVIDIA_HARNESS_RUN_MODE*: `preprocess_data` + - Workflow: + * **`_run_harness`** (default) + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_RUN_MODE*: `run_harness` + - *MLPERF_NVIDIA_RUN_COMMAND*: `run_harness` + - *CM_CALL_MLPERF_RUNNER*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,cuda,_cudnn + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * get,tensorrt + * CM names: `--adr.['tensorrt']...` + - CM script: [get-tensorrt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tensorrt) + * build,nvidia,inference,server + * CM names: `--adr.['nvidia-inference-server']...` + - CM script: [build-mlperf-inference-server-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/build-mlperf-inference-server-nvidia) + * reproduce,mlperf,inference,nvidia,harness,_build_engine + * CM names: `--adr.['build-engine']...` + - CM script: [app-mlperf-inference-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-nvidia) + * reproduce,mlperf,inference,nvidia,harness,_preprocess_data + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_MODEL': ['dlrm-v2-99', 'dlrm-v2-99.9']}` + - CM script: [app-mlperf-inference-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-nvidia) + * reproduce,mlperf,inference,nvidia,harness,_download_model + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet', 'resnet50', 'bert-99', 'bert-99.9', 'dlrm-v2-99', 'dlrm-v2-99.9']}` + - CM script: [app-mlperf-inference-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-nvidia) + +
+ + + * Group "**triton**" +
+ Click here to expand this section. + + * `_use_triton` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_USE_TRITON*: `yes` + - *CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX3*: `using_triton` + - Workflow: + +
+ + +#### Default variations + +`_cuda,_num-gpus.1,_resnet50,_run_harness,_tensorrt` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--audio_buffer_num_lines=value` → `CM_MLPERF_NVIDIA_HARNESS_AUDIO_BUFFER_NUM_LINES=value` +* `--count=value` → `CM_MLPERF_LOADGEN_QUERY_COUNT=value` +* `--deque_timeout_usec=value` → `CM_MLPERF_NVIDIA_HARNESS_DEQUE_TIMEOUT_USEC=value` +* `--devices=value` → `CM_MLPERF_NVIDIA_HARNESS_DEVICES=value` +* `--dla_batch_size=value` → `CM_MLPERF_NVIDIA_HARNESS_DLA_BATCH_SIZE=value` +* `--dla_copy_streams=value` → `CM_MLPERF_NVIDIA_HARNESS_DLA_COPY_STREAMS=value` +* `--dla_inference_streams=value` → `CM_MLPERF_NVIDIA_HARNESS_DLA_INFERENCE_STREAMS=value` +* `--embedding_weights_on_gpu_part=value` → `CM_MLPERF_NVIDIA_HARNESS_EMBEDDING_WEIGHTS_ON_GPU_PART=value` +* `--enable_sort=value` → `CM_MLPERF_NVIDIA_HARNESS_ENABLE_SORT=value` +* `--end_on_device=value` → `CM_MLPERF_NVIDIA_HARNESS_END_ON_DEVICE=value` +* `--extra_run_options=value` → `CM_MLPERF_NVIDIA_HARNESS_EXTRA_RUN_OPTIONS=value` +* `--gpu_batch_size=value` → `CM_MLPERF_NVIDIA_HARNESS_GPU_BATCH_SIZE=value` +* `--gpu_copy_streams=value` → `CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS=value` +* `--gpu_inference_streams=value` → `CM_MLPERF_NVIDIA_HARNESS_GPU_INFERENCE_STREAMS=value` +* `--graphs_max_seqlen=value` → `CM_MLPERF_NVIDIA_HARNESS_GRAPHS_MAX_SEQLEN=value` +* `--input_format=value` → `CM_MLPERF_NVIDIA_HARNESS_INPUT_FORMAT=value` +* `--log_dir=value` → `CM_MLPERF_NVIDIA_HARNESS_LOG_DIR=value` +* `--make_cmd=value` → `MLPERF_NVIDIA_RUN_COMMAND=value` +* `--max_batchsize=value` → `CM_MLPERF_LOADGEN_MAX_BATCHSIZE=value` +* `--max_dlas=value` → `CM_MLPERF_NVIDIA_HARNESS_MAX_DLAS=value` +* `--mlperf_conf=value` → `CM_MLPERF_CONF=value` +* `--mode=value` → `CM_MLPERF_LOADGEN_MODE=value` +* `--multistream_target_latency=value` → `CM_MLPERF_LOADGEN_MULTISTREAM_TARGET_LATENCY=value` +* `--num_issue_query_threads=value` → `CM_MLPERF_NVIDIA_HARNESS_NUM_ISSUE_QUERY_THREADS=value` +* `--num_sort_segments=value` → `CM_MLPERF_NVIDIA_HARNESS_NUM_SORT_SEGMENTS=value` +* 
`--num_warmups=value` → `CM_MLPERF_NVIDIA_HARNESS_NUM_WARMUPS=value` +* `--offline_target_qps=value` → `CM_MLPERF_LOADGEN_OFFLINE_TARGET_QPS=value` +* `--output_dir=value` → `CM_MLPERF_OUTPUT_DIR=value` +* `--performance_sample_count=value` → `CM_MLPERF_LOADGEN_PERFORMANCE_SAMPLE_COUNT=value` +* `--power_setting=value` → `CM_MLPERF_NVIDIA_HARNESS_POWER_SETTING=value` +* `--rerun=value` → `CM_RERUN=value` +* `--run_infer_on_copy_streams=value` → `CM_MLPERF_NVIDIA_HARNESS_RUN_INFER_ON_COPY_STREAMS=value` +* `--scenario=value` → `CM_MLPERF_LOADGEN_SCENARIO=value` +* `--server_target_qps=value` → `CM_MLPERF_LOADGEN_SERVER_TARGET_QPS=value` +* `--singlestream_target_latency=value` → `CM_MLPERF_LOADGEN_SINGLESTREAM_TARGET_LATENCY=value` +* `--skip_postprocess=value` → `CM_MLPERF_NVIDIA_HARNESS_SKIP_POSTPROCESS=value` +* `--skip_preprocess=value` → `CM_SKIP_PREPROCESS_DATASET=value` +* `--skip_preprocessing=value` → `CM_SKIP_PREPROCESS_DATASET=value` +* `--soft_drop=value` → `CM_MLPERF_NVIDIA_HARNESS_SOFT_DROP=value` +* `--start_from_device=value` → `CM_MLPERF_NVIDIA_HARNESS_START_FROM_DEVICE=value` +* `--target_latency=value` → `CM_MLPERF_LOADGEN_TARGET_LATENCY=value` +* `--target_qps=value` → `CM_MLPERF_LOADGEN_TARGET_QPS=value` +* `--use_cuda_thread_per_device=value` → `CM_MLPERF_NVIDIA_HARNESS_USE_CUDA_THREAD_PER_DEVICE=value` +* `--use_deque_limit=value` → `CM_MLPERF_NVIDIA_HARNESS_USE_DEQUE_LIMIT=value` +* `--use_fp8=value` → `CM_MLPERF_NVIDIA_HARNESS_USE_FP8=value` +* `--use_graphs=value` → `CM_MLPERF_NVIDIA_HARNESS_USE_GRAPHS=value` +* `--use_small_tile_gemm_plugin=value` → `CM_MLPERF_NVIDIA_HARNESS_USE_SMALL_TILE_GEMM_PLUGIN=value` +* `--use_triton=value` → `CM_MLPERF_NVIDIA_HARNESS_USE_TRITON=value` +* `--user_conf=value` → `CM_MLPERF_USER_CONF=value` +* `--workspace_size=value` → `CM_MLPERF_NVIDIA_HARNESS_WORKSPACE_SIZE=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... 
, "audio_buffer_num_lines":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_BATCH_COUNT: `1` +* CM_BATCH_SIZE: `1` +* CM_FAST_COMPILATION: `yes` +* CM_MLPERF_LOADGEN_SCENARIO: `Offline` +* CM_MLPERF_LOADGEN_MODE: `performance` +* CM_SKIP_PREPROCESS_DATASET: `no` +* CM_SKIP_MODEL_DOWNLOAD: `no` +* CM_MLPERF_SUT_NAME_IMPLEMENTATION_PREFIX: `nvidia_original` +* CM_MLPERF_SKIP_RUN: `no` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-nvidia/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,mlperf,inference,nvidia,scratch,space + * CM names: `--adr.['nvidia-scratch-space']...` + - CM script: [get-mlperf-inference-nvidia-scratch-space](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-nvidia-scratch-space) + * get,generic-python-lib,_mlperf_logging + * CM names: `--adr.['mlperf-logging']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,dataset,original,imagenet,_full + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['resnet50']}` + * CM names: `--adr.['imagenet-original']...` + - CM script: [get-dataset-imagenet-val](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-val) + * get,ml-model,resnet50,_fp32,_onnx,_opset-8 + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['resnet50']}` + * CM names: `--adr.['resnet50-model', 'ml-model']...` + - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) + * get,dataset,original,kits19 + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['3d-unet-99-disabled', '3d-unet-99.9-disabled']}` + * CM names: `--adr.['kits19-original']...` + - CM script: [get-dataset-kits19](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-kits19) + * get,dataset,original,librispeech + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['rnnt']}` + * CM names: `--adr.['librispeech-original']...` + - CM script: [get-dataset-librispeech](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-librispeech) + * get,dataset,preprocessed,criteo + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['dlrm-v2-99', 'dlrm-v2-99.9']}`
+       * Skip this dependency only if all ENV vars are set:<br>
+`{'DLRM_DATA_PATH': [True]}` + * CM names: `--adr.['criteo-preprocessed']...` + - CM script: [get-preprocessed-dataset-criteo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-criteo) + * get,ml-model,dlrm,_pytorch + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['dlrm-v2-99', 'dlrm-v2-99.9']}`
+       * Skip this dependency only if all ENV vars are set:<br>
+`{'DLRM_DATA_PATH': [True]}` + * CM names: `--adr.['dlrm-model']...` + - CM script: [get-ml-model-dlrm-terabyte](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-dlrm-terabyte) + * get,ml-model,bert,_onnx,_fp32 + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['bert-99', 'bert-99.9']}` + * CM names: `--adr.['bert-model', 'bert-model-fp32']...` + - CM script: [get-ml-model-bert-large-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-large-squad) + * get,ml-model,bert,_onnx,_int8 + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['bert-99', 'bert-99.9']}` + * CM names: `--adr.['bert-model', 'bert-model-int8']...` + - CM script: [get-ml-model-bert-large-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-large-squad) + * get,squad-vocab + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['bert-99', 'bert-99.9']}` + * CM names: `--adr.['bert-vocab']...` + - CM script: [get-dataset-squad-vocab](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad-vocab) + * get,dataset,original,openimages,_validation,_full,_custom-annotations + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet']}` + * CM names: `--adr.['openimages-original']...` + - CM script: [get-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages) + * get,dataset,original,openimages,_calibration + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet']}` + * CM names: `--adr.['openimages-calibration']...` + - CM script: [get-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages) + * get,dataset,original,openorca + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['gptj-99', 'gptj-99.9'], 'CM_MLPERF_NVIDIA_HARNESS_RUN_MODE': ['preprocess_dataset']}` + * CM names: `--adr.['openorca-original']...` + - CM script: [get-dataset-openorca](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openorca) + * get,mlcommons,inference,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,nvidia,mlperf,inference,common-code + * CM names: `--adr.['nvidia-inference-common-code']...` + - CM script: [get-mlperf-inference-nvidia-common-code](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-nvidia-common-code) + * generate,user-conf,mlperf,inference + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_NVIDIA_HARNESS_RUN_MODE': ['run_harness']}` + * CM names: `--adr.['user-conf-generator']...` + - CM script: [generate-mlperf-inference-user-conf](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-inference-user-conf) + * get,generic-python-lib,_package.nvmitten,_path./opt/nvmitten-0.1.3-cp38-cp38-linux_x86_64.whl + * Enable this dependency only if all ENV vars are set:
+`{'CM_RUN_STATE_DOCKER': ['yes', True, 'True']}`
+       - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib)
+     * get,nvidia,mitten
+       * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_RUN_STATE_DOCKER': ['yes', True, 'True']}` + - CM script: [get-nvidia-mitten](https://github.com/mlcommons/cm4mlops/tree/master/script/get-nvidia-mitten) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-nvidia/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-nvidia/_cm.yaml)*** + * get,ml-model,gptj,_pytorch,_rclone + * Enable this dependency only if all ENV vars are set:
+`{'CM_REQUIRE_GPTJ_MODEL_DOWNLOAD': ['yes'], 'CM_MLPERF_NVIDIA_HARNESS_RUN_MODE': ['download_model', 'preprocess_data']}` + * CM names: `--adr.['gptj-model']...` + - CM script: [get-ml-model-gptj](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-gptj) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-nvidia/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-nvidia/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-nvidia/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-nvidia/_cm.yaml)*** + * benchmark-mlperf + * Enable this dependency only if all ENV vars are set:
+`{'CM_CALL_MLPERF_RUNNER': [True]}`
+       * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_MLPERF_SKIP_RUN': ['yes', True]}` + * CM names: `--adr.['runner', 'mlperf-runner']...` + - CM script: [benchmark-program-mlperf](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program-mlperf) + * save,mlperf,inference,state + * CM names: `--adr.['save-mlperf-inference-state']...` + - CM script: [save-mlperf-inference-implementation-state](https://github.com/mlcommons/cm4mlops/tree/master/script/save-mlperf-inference-implementation-state) + +___ +### Script output +`cmr "reproduce mlcommons mlperf inference harness nvidia-harness nvidia [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Reproduce-MLPerf-benchmarks/reproduce-mlperf-octoml-tinyml-results.md b/docs/Reproduce-MLPerf-benchmarks/reproduce-mlperf-octoml-tinyml-results.md new file mode 100644 index 0000000000..79baf27eaa --- /dev/null +++ b/docs/Reproduce-MLPerf-benchmarks/reproduce-mlperf-octoml-tinyml-results.md @@ -0,0 +1,214 @@ +Automatically generated README for this automation recipe: **reproduce-mlperf-octoml-tinyml-results** + +Category: **Reproduce MLPerf benchmarks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=reproduce-mlperf-octoml-tinyml-results,a63803a707d04332) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-octoml-tinyml-results)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): 
*reproduce,tiny,results,mlperf,octoml,mlcommons* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "reproduce tiny results mlperf octoml mlcommons" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=reproduce,tiny,results,mlperf,octoml,mlcommons` + +`cm run script --tags=reproduce,tiny,results,mlperf,octoml,mlcommons[,variations] [--input_flags]` + +*or* + +`cmr "reproduce tiny results mlperf octoml mlcommons"` + +`cmr "reproduce tiny results mlperf octoml mlcommons [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'reproduce,tiny,results,mlperf,octoml,mlcommons' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="reproduce,tiny,results,mlperf,octoml,mlcommons"``` + +#### Run this script via Docker (beta) + +`cm docker script "reproduce tiny results mlperf octoml mlcommons[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_NRF` + - Environment variables: + - *CM_TINY_BOARD*: `NRF5340DK` + - Workflow: + * `_NUCLEO` + - Environment variables: + - *CM_TINY_BOARD*: `NUCLEO_L4R5ZI` + - Workflow: + * `_ad` + - Environment variables: + - *CM_TINY_MODEL*: `ad` + - Workflow: + * `_cmsis_nn` + - Environment variables: + - *CM_MICROTVM_VARIANT*: `microtvm_cmsis_nn` + - Workflow: + * `_ic` + - Environment variables: + - *CM_TINY_MODEL*: `ic` + - Workflow: + * `_kws` + - Environment variables: + - *CM_TINY_MODEL*: `kws` + - Workflow: + * `_native` + - Environment variables: + - *CM_MICROTVM_VARIANT*: `microtvm_native` + - Workflow: + * `_vww` + - Environment variables: + - *CM_TINY_MODEL*: `vww` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--flash=value` → `CM_FLASH_BOARD=value` +* `--recreate_binary=value` → `CM_RECREATE_BINARY=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "flash":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `r1.0` + +* `r1.0` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-octoml-tinyml-results/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,zephyr + * CM names: `--adr.['zephyr']...` + - CM script: [get-zephyr](https://github.com/mlcommons/cm4mlops/tree/master/script/get-zephyr) + * get,zephyr-sdk + * CM names: `--adr.['zephyr-sdk']...` + - CM script: [get-zephyr-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-zephyr-sdk) + * get,cmsis + * CM names: `--adr.['cmsis']...` + - CM script: [get-cmsis_5](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmsis_5) + * get,microtvm + * CM names: `--adr.['microtvm']...` + - CM script: [get-microtvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-microtvm) + * get,cmake + * CM names: `--adr.['cmake']...` + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + * get,gcc + - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-octoml-tinyml-results/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-octoml-tinyml-results/_cm.json) + 1. 
***Run native script if exists***
+     * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-octoml-tinyml-results/run.sh)
+  1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-octoml-tinyml-results/_cm.json)
+  1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-octoml-tinyml-results/customize.py)***
+  1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-octoml-tinyml-results/_cm.json)***
+     * flash,tiny,mlperf
+       * Enable this dependency only if all ENV vars are set:<br>
+`{'CM_FLASH_BOARD': ['True']}` + - CM script: [flash-tinyml-binary](https://github.com/mlcommons/cm4mlops/tree/master/script/flash-tinyml-binary) + +___ +### Script output +`cmr "reproduce tiny results mlperf octoml mlcommons [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_TINY_*` +#### New environment keys auto-detected from customize + +* `CM_TINY_MODEL` \ No newline at end of file diff --git a/docs/Reproduce-MLPerf-benchmarks/reproduce-mlperf-training-nvidia.md b/docs/Reproduce-MLPerf-benchmarks/reproduce-mlperf-training-nvidia.md new file mode 100644 index 0000000000..86f08d6b80 --- /dev/null +++ b/docs/Reproduce-MLPerf-benchmarks/reproduce-mlperf-training-nvidia.md @@ -0,0 +1,169 @@ +Automatically generated README for this automation recipe: **reproduce-mlperf-training-nvidia** + +Category: **Reproduce MLPerf benchmarks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=reproduce-mlperf-training-nvidia,f183628f292341e2) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-training-nvidia)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *reproduce,mlcommons,mlperf,train,training,nvidia-training,nvidia* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "reproduce mlcommons mlperf train training nvidia-training nvidia" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=reproduce,mlcommons,mlperf,train,training,nvidia-training,nvidia` + +`cm run script --tags=reproduce,mlcommons,mlperf,train,training,nvidia-training,nvidia[,variations] [--input_flags]` + +*or* + +`cmr "reproduce mlcommons mlperf train training nvidia-training nvidia"` + +`cmr "reproduce mlcommons mlperf train training nvidia-training nvidia [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'reproduce,mlcommons,mlperf,train,training,nvidia-training,nvidia',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="reproduce,mlcommons,mlperf,train,training,nvidia-training,nvidia"``` + +#### Run this script via Docker (beta) + +`cm docker script "reproduce mlcommons mlperf train training nvidia-training nvidia[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * Group "**benchmark**" +
+ Click here to expand this section. + + * `_resnet` + - Environment variables: + - *CM_MLPERF_TRAINING_BENCHMARK*: `resnet` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * prepare,mlperf,training,resnet,_nvidia + * CM names: `--adr.['prepare-training-data', 'nvidia-training-data']...` + - CM script: [prepare-training-data-resnet](https://github.com/mlcommons/cm4mlops/tree/master/script/prepare-training-data-resnet) + * get,nvidia,training,code + * CM names: `--adr.['nvidia-training-code']...` + - CM script: [get-mlperf-training-nvidia-code](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-training-nvidia-code) + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section.
+
+* `--results_dir=value` → `CM_MLPERF_RESULTS_DIR=value`
+* `--system_conf_name=value` → `CM_MLPERF_NVIDIA_TRAINING_SYSTEM_CONF_NAME=value`
+
+**Above CLI flags can be used in the Python CM API as follows:**
+
+```python
+r=cm.access({... , "results_dir":...})
+```
+
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +* `r2.1` +* `r3.0` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-training-nvidia/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,nvidia-docker + - CM script: [get-nvidia-docker](https://github.com/mlcommons/cm4mlops/tree/master/script/get-nvidia-docker) + * get,cuda + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-training-nvidia/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-training-nvidia/_cm.yaml) + 1. ***Run native script if exists*** + * [run-resnet.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-training-nvidia/run-resnet.sh) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-training-nvidia/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-training-nvidia/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-training-nvidia/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-training-nvidia/_cm.yaml) + +___ +### Script output +`cmr "reproduce mlcommons mlperf train training nvidia-training nvidia [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Reproduce-MLPerf-benchmarks/wrapper-reproduce-octoml-tinyml-submission.md b/docs/Reproduce-MLPerf-benchmarks/wrapper-reproduce-octoml-tinyml-submission.md new file mode 100644 index 0000000000..4854b325ce --- /dev/null +++ b/docs/Reproduce-MLPerf-benchmarks/wrapper-reproduce-octoml-tinyml-submission.md @@ -0,0 +1,140 @@ +Automatically generated README for this automation recipe: **wrapper-reproduce-octoml-tinyml-submission** + +Category: **Reproduce MLPerf benchmarks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=wrapper-reproduce-octoml-tinyml-submission,b946001e289c4480) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/wrapper-reproduce-octoml-tinyml-submission)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *run,generate-tiny,generate,submission,tiny,generate-tiny-submission,results,mlcommons,mlperf,octoml* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "run generate-tiny generate submission tiny generate-tiny-submission results mlcommons mlperf octoml" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=run,generate-tiny,generate,submission,tiny,generate-tiny-submission,results,mlcommons,mlperf,octoml` + +`cm run script --tags=run,generate-tiny,generate,submission,tiny,generate-tiny-submission,results,mlcommons,mlperf,octoml [--input_flags]` + +*or* + +`cmr "run generate-tiny generate submission tiny generate-tiny-submission results mlcommons mlperf octoml"` + +`cmr "run generate-tiny generate submission tiny generate-tiny-submission results mlcommons mlperf octoml " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'run,generate-tiny,generate,submission,tiny,generate-tiny-submission,results,mlcommons,mlperf,octoml',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="run,generate-tiny,generate,submission,tiny,generate-tiny-submission,results,mlcommons,mlperf,octoml"``` + +#### Run this script via Docker (beta) + +`cm docker script "run generate-tiny generate submission tiny generate-tiny-submission results mlcommons mlperf octoml" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section.
+
+* `--flash=value` → `CM_FLASH_BOARD=value`
+* `--recreate_binary=value` → `CM_RECREATE_BINARY=value`
+
+**Above CLI flags can be used in the Python CM API as follows:**
+
+```python
+r=cm.access({... , "flash":...})
+```
+
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `r1.0` + +* `r1.0` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/wrapper-reproduce-octoml-tinyml-submission/_cm.json)*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/wrapper-reproduce-octoml-tinyml-submission/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/wrapper-reproduce-octoml-tinyml-submission/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/wrapper-reproduce-octoml-tinyml-submission/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/wrapper-reproduce-octoml-tinyml-submission/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/wrapper-reproduce-octoml-tinyml-submission/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/wrapper-reproduce-octoml-tinyml-submission/_cm.json) + +___ +### Script output +`cmr "run generate-tiny generate submission tiny generate-tiny-submission results mlcommons mlperf octoml " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Reproducibility-and-artifact-evaluation/get-ipol-src.md b/docs/Reproducibility-and-artifact-evaluation/get-ipol-src.md new file mode 100644 index 0000000000..755607bfb1 --- /dev/null +++ b/docs/Reproducibility-and-artifact-evaluation/get-ipol-src.md @@ -0,0 +1,146 @@ +Automatically generated README for this automation recipe: **get-ipol-src** + +Category: **Reproducibility and artifact evaluation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ipol-src,b6fd8213d03d4aa4) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ipol-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,ipol,journal,src,ipol-src* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get ipol journal src ipol-src" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,ipol,journal,src,ipol-src` + +`cm run script --tags=get,ipol,journal,src,ipol-src [--input_flags]` + +*or* + +`cmr "get ipol journal src ipol-src"` + +`cmr "get ipol journal src ipol-src " [--input_flags]` + + + +#### Input Flags + +* --**number**=IPOL publication number +* --**year**=IPOL publication year + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "number":...} +``` +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,ipol,journal,src,ipol-src',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,ipol,journal,src,ipol-src"``` + +#### Run this script via Docker (beta) + +`cm docker script "get ipol journal src ipol-src" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section.
+
+* `--number=value` → `CM_IPOL_NUMBER=value`
+* `--year=value` → `CM_IPOL_YEAR=value`
+
+**Above CLI flags can be used in the Python CM API as follows:**
+
+```python
+r=cm.access({... , "number":...})
+```
+
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ipol-src/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ipol-src/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ipol-src/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ipol-src/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ipol-src/_cm.json) + +___ +### Script output +`cmr "get ipol journal src ipol-src " [--input_flags] -j` +#### New environment keys (filter) + +* `CM_IPOL_*` +#### New environment keys auto-detected from customize + +* `CM_IPOL_PATH` \ No newline at end of file diff --git a/docs/Reproducibility-and-artifact-evaluation/process-ae-users.md b/docs/Reproducibility-and-artifact-evaluation/process-ae-users.md new file mode 100644 index 0000000000..38c4316ad0 --- /dev/null +++ b/docs/Reproducibility-and-artifact-evaluation/process-ae-users.md @@ -0,0 +1,136 @@ +Automatically generated README for this automation recipe: **process-ae-users** + +Category: **Reproducibility and artifact evaluation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=process-ae-users,5800f1ed677e4efb) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-ae-users)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *process,ae,users* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "process ae users" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=process,ae,users` + +`cm run script --tags=process,ae,users [--input_flags]` + +*or* + +`cmr "process ae users"` + +`cmr "process ae users " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'process,ae,users',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="process,ae,users"``` + +#### Run this script via Docker (beta) + +`cm docker script "process ae users" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section.
+
+* `--file=value` → `CM_PROCESS_AE_USERS_INPUT_FILE=value`
+
+**Above CLI flags can be used in the Python CM API as follows:**
+
+```python
+r=cm.access({... , "file":...})
+```
+
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-ae-users/_cm.json)*** + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-ae-users/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-ae-users/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-ae-users/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-ae-users/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-ae-users/_cm.json) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-ae-users/_cm.json) + +___ +### Script output +`cmr "process ae users " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Reproducibility-and-artifact-evaluation/reproduce-ipol-paper-2022-439.md b/docs/Reproducibility-and-artifact-evaluation/reproduce-ipol-paper-2022-439.md new file mode 100644 index 0000000000..5212a87b0a --- /dev/null +++ b/docs/Reproducibility-and-artifact-evaluation/reproduce-ipol-paper-2022-439.md @@ -0,0 +1,148 @@ +Automatically generated README for this automation recipe: **reproduce-ipol-paper-2022-439** + +Category: **Reproducibility and artifact evaluation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=reproduce-ipol-paper-2022-439,f9b9e5bd65e34e4f) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-ipol-paper-2022-439)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *app,python,reproduce,project,paper,ipol,journal,repro,reproducibility,pytorch,2022-439* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "app python reproduce project paper ipol journal repro reproducibility pytorch 2022-439" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=app,python,reproduce,project,paper,ipol,journal,repro,reproducibility,pytorch,2022-439` + +`cm run script --tags=app,python,reproduce,project,paper,ipol,journal,repro,reproducibility,pytorch,2022-439 [--input_flags]` + +*or* + +`cmr "app python reproduce project paper ipol journal repro reproducibility pytorch 2022-439"` + +`cmr "app python reproduce project paper ipol journal repro reproducibility pytorch 2022-439 " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'app,python,reproduce,project,paper,ipol,journal,repro,reproducibility,pytorch,2022-439',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="app,python,reproduce,project,paper,ipol,journal,repro,reproducibility,pytorch,2022-439"``` + +#### Run this script via Docker (beta) + +`cm docker script "app python reproduce project paper ipol journal repro reproducibility pytorch 2022-439" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section.
+
+* `--image1=value` → `CM_IMAGE_1=value`
+* `--image2=value` → `CM_IMAGE_2=value`
+
+**Above CLI flags can be used in the Python CM API as follows:**
+
+```python
+r=cm.access({... , "image1":...})
+```
+
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-ipol-paper-2022-439/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,ipol,src + * CM names: `--adr.['ipol-src']...` + - CM script: [get-ipol-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ipol-src) + * get,generic-python-lib,_torch + * CM names: `--adr.['torch']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torchvision + * CM names: `--adr.['torchvision']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-ipol-paper-2022-439/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-ipol-paper-2022-439/_cm.yaml) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-ipol-paper-2022-439/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-ipol-paper-2022-439/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-ipol-paper-2022-439/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-ipol-paper-2022-439/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-ipol-paper-2022-439/_cm.yaml) + +___ +### Script output +`cmr "app python reproduce project paper ipol journal repro reproducibility pytorch 2022-439 " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Reproducibility-and-artifact-evaluation/reproduce-micro-paper-2023-victima.md b/docs/Reproducibility-and-artifact-evaluation/reproduce-micro-paper-2023-victima.md new file mode 100644 index 0000000000..41b899e1e2 --- /dev/null +++ b/docs/Reproducibility-and-artifact-evaluation/reproduce-micro-paper-2023-victima.md @@ -0,0 +1,179 @@ +
+Click here to see the table of contents. + +* [About](#about) +* [Summary](#summary) +* [Reuse this script in your project](#reuse-this-script-in-your-project) + * [ Install CM automation language](#install-cm-automation-language) + * [ Check CM script flags](#check-cm-script-flags) + * [ Run this script from command line](#run-this-script-from-command-line) + * [ Run this script from Python](#run-this-script-from-python) + * [ Run this script via GUI](#run-this-script-via-gui) + * [ Run this script via Docker (beta)](#run-this-script-via-docker-(beta)) +* [Customization](#customization) + * [ Variations](#variations) + * [ Script flags mapped to environment](#script-flags-mapped-to-environment) + * [ Default environment](#default-environment) +* [Script workflow, dependencies and native scripts](#script-workflow-dependencies-and-native-scripts) +* [Script output](#script-output) +* [New environment keys (filter)](#new-environment-keys-(filter)) +* [New environment keys auto-detected from customize](#new-environment-keys-auto-detected-from-customize) +* [Maintainers](#maintainers) + +
+ +*Note that this README is automatically generated - don't edit!* + +### About + + +See extra [notes](README-extra.md) from the authors and contributors. + +#### Summary + +* Category: *Reproducibility and artifact evaluation.* +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/main/script/reproduce-micro-paper-2023-victima)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* CM "database" tags to find this script: *reproduce,project,paper,micro,micro-2023,victima* +* Output cached? *False* +___ +### Reuse this script in your project + +#### Install CM automation language + +* [Installation guide](https://github.com/mlcommons/ck/blob/master/docs/installation.md) +* [CM intro](https://doi.org/10.5281/zenodo.8105339) + +#### Pull CM repository with this automation + +```cm pull repo mlcommons@cm4mlops --checkout=dev``` + + +#### Run this script from command line + +1. `cm run script --tags=reproduce,project,paper,micro,micro-2023,victima[,variations] [--input_flags]` + +2. `cmr "reproduce project paper micro micro-2023 victima[ variations]" [--input_flags]` + +* `variations` can be seen [here](#variations) + +* `input_flags` can be seen [here](#script-flags-mapped-to-environment) + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'reproduce,project,paper,micro,micro-2023,victima',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="reproduce,project,paper,micro,micro-2023,victima"``` + +Use this [online GUI](https://cKnowledge.org/cm-gui/?tags=reproduce,project,paper,micro,micro-2023,victima) to generate CM CMD. + +#### Run this script via Docker (beta) + +`cm docker script "reproduce project paper micro micro-2023 victima[ variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_install_deps` + - Workflow: + * `_plot` + - Workflow: + * `_run` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section.
+
+* `--container=value` → `CM_VICTIMA_CONTAINER=value`
+* `--job_manager=value` → `CM_VICTIMA_JOB_MANAGER=value`
+
+**Above CLI flags can be used in the Python CM API as follows:**
+
+```python
+r=cm.access({... , "container":...})
+```
+
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_VICTIMA_JOB_MANAGER: `native` +* CM_VICTIMA_CONTAINER: `docker` + +
+ +___ +### Script workflow, dependencies and native scripts + +
+Click here to expand this section. + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/main/script/reproduce-micro-paper-2023-victima/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/main/script/detect-os) + * get,python + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/main/script/get-python3) + * get,git,repo,_repo.https://github.com/CMU-SAFARI/Victima + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/main/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/main/script/reproduce-micro-paper-2023-victima/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/main/script/reproduce-micro-paper-2023-victima/_cm.yaml) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/reproduce-micro-paper-2023-victima/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/main/script/reproduce-micro-paper-2023-victima/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/main/script/reproduce-micro-paper-2023-victima/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/main/script/reproduce-micro-paper-2023-victima/_cm.yaml) +
+ +___ +### Script output +`cmr "reproduce project paper micro micro-2023 victima[,variations]" [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize + +___ +### Maintainers + +* [Open MLCommons taskforce on automation and reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) \ No newline at end of file diff --git a/docs/Tests/print-croissant-desc.md b/docs/Tests/print-croissant-desc.md new file mode 100644 index 0000000000..fafb367746 --- /dev/null +++ b/docs/Tests/print-croissant-desc.md @@ -0,0 +1,144 @@ +Automatically generated README for this automation recipe: **print-croissant-desc** + +Category: **Tests** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=print-croissant-desc,59116d5c98a04d4f) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-croissant-desc)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *print,croissant,desc* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "print croissant desc" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=print,croissant,desc` + +`cm run script --tags=print,croissant,desc [--input_flags]` + +*or* + +`cmr "print croissant desc"` + +`cmr "print croissant desc " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'print,croissant,desc', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="print,croissant,desc"``` + +#### Run this script via Docker (beta) + +`cm docker script "print croissant desc" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--url=value` → `CM_PRINT_CROISSANT_URL=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "url":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_PRINT_CROISSANT_URL: `https://raw.githubusercontent.com/mlcommons/croissant/main/datasets/1.0/gpt-3/metadata.json` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-croissant-desc/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,croissant + * CM names: `--adr.['croissant']...` + - CM script: [get-croissant](https://github.com/mlcommons/cm4mlops/tree/master/script/get-croissant) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-croissant-desc/_cm.yaml) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-croissant-desc/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-croissant-desc/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-croissant-desc/_cm.yaml) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-croissant-desc/_cm.yaml) + +___ +### Script output +`cmr "print croissant desc " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Tests/print-hello-world-java.md b/docs/Tests/print-hello-world-java.md new file mode 100644 index 0000000000..2b51ce3f03 --- /dev/null +++ b/docs/Tests/print-hello-world-java.md @@ -0,0 +1,123 @@ +Automatically generated README for this automation recipe: **print-hello-world-java** + +Category: **Tests** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=print-hello-world-java,3b62dc46cce3489c) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-java)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *print,hello world,hello-world,hello,world,java* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "print hello world hello-world hello world java" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=print,hello world,hello-world,hello,world,java` + +`cm run script --tags=print,hello world,hello-world,hello,world,java ` + +*or* + +`cmr "print hello world hello-world hello world java"` + +`cmr "print hello world hello-world hello world java " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'print,hello world,hello-world,hello,world,java' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="print,hello world,hello-world,hello,world,java"``` + +#### Run this script via Docker (beta) + +`cm docker script "print hello world hello-world hello world java" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-java/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,java + * CM names: `--adr.['java']...` + - CM script: [get-java](https://github.com/mlcommons/cm4mlops/tree/master/script/get-java) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-java/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-java/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-java/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-java/_cm.json) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-java/_cm.json) + +___ +### Script output +`cmr "print hello world hello-world hello world java " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Tests/print-hello-world-javac.md b/docs/Tests/print-hello-world-javac.md new file mode 100644 index 0000000000..3e1db5b116 --- /dev/null +++ b/docs/Tests/print-hello-world-javac.md @@ -0,0 +1,123 @@ +Automatically generated README for this automation recipe: **print-hello-world-javac** + +Category: **Tests** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=print-hello-world-javac,040fafd538104819) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-javac)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *print,hello world,hello-world,hello,world,javac* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "print hello world hello-world hello world javac" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=print,hello world,hello-world,hello,world,javac` + +`cm run script --tags=print,hello world,hello-world,hello,world,javac ` + +*or* + +`cmr "print hello world hello-world hello world javac"` + +`cmr "print hello world hello-world hello world javac " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'print,hello world,hello-world,hello,world,javac' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="print,hello world,hello-world,hello,world,javac"``` + +#### Run this script via Docker (beta) + +`cm docker script "print hello world hello-world hello world javac" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-javac/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,javac + * CM names: `--adr.['javac']...` + - CM script: [get-javac](https://github.com/mlcommons/cm4mlops/tree/master/script/get-javac) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-javac/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-javac/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-javac/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-javac/_cm.json) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-javac/_cm.json) + +___ +### Script output +`cmr "print hello world hello-world hello world javac " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Tests/print-hello-world-py.md b/docs/Tests/print-hello-world-py.md new file mode 100644 index 0000000000..ddfa31d1de --- /dev/null +++ b/docs/Tests/print-hello-world-py.md @@ -0,0 +1,129 @@ +Automatically generated README for this automation recipe: **print-hello-world-py** + +Category: **Tests** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=print-hello-world-py,d83274c7eb754d90) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-py)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *print,hello world,hello-world,hello,world,python* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "print hello world hello-world hello world python" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=print,hello world,hello-world,hello,world,python` + +`cm run script --tags=print,hello world,hello-world,hello,world,python ` + +*or* + +`cmr "print hello world hello-world hello world python"` + +`cmr "print hello world hello-world hello world python " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'print,hello world,hello-world,hello,world,python' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="print,hello world,hello-world,hello,world,python"``` + +#### Run this script via Docker (beta) + +`cm docker script "print hello world hello-world hello world python" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-py/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * print,python-version + * Skip this dependency only if all ENV vars are set:
+`{'CM_SKIP_PRINT': ['True'], 'CM_SKIP_PRINT2': ['True']}` + - CM script: [print-python-version](https://github.com/mlcommons/cm4mlops/tree/master/script/print-python-version) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-py/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-py/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-py/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-py/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-py/_cm.json) + +___ +### Script output +`cmr "print hello world hello-world hello world python " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Tests/print-hello-world.md b/docs/Tests/print-hello-world.md new file mode 100644 index 0000000000..1505464e73 --- /dev/null +++ b/docs/Tests/print-hello-world.md @@ -0,0 +1,155 @@ +Automatically generated README for this automation recipe: **print-hello-world** + +Category: **Tests** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=print-hello-world,b9f0acba4aca4baa) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world)* +* CM meta 
description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *print,hello-world,hello world,hello,world,native-script,native,script* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "print hello-world hello world hello world native-script native script" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=print,hello-world,hello world,hello,world,native-script,native,script` + +`cm run script --tags=print,hello-world,hello world,hello,world,native-script,native,script[,variations] [--input_flags]` + +*or* + +`cmr "print hello-world hello world hello world native-script native script"` + +`cmr "print hello-world hello world hello world native-script native script [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'print,hello-world,hello world,hello,world,native-script,native,script' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="print,hello-world,hello world,hello,world,native-script,native,script"``` + +#### Run this script via Docker (beta) + +`cm docker script "print hello-world hello world hello world native-script native script[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_skip_print_env` + - Environment variables: + - *CM_PRINT_HELLO_WORLD_SKIP_PRINT_ENV*: `yes` + - Workflow: + * `_text.#` + - Environment variables: + - *CM_PRINT_HELLO_WORLD_TEXT*: `#` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--test1=value` → `CM_ENV_TEST1=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "test1":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_ENV_TEST1: `TEST1` + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world/_cm.yaml) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world/_cm.yaml) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world/_cm.yaml) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world/_cm.yaml) + +___ +### Script output +`cmr "print hello-world hello world hello world native-script native script [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_ENV_TEST*` +#### New environment keys auto-detected from customize diff --git a/docs/Tests/print-python-version.md b/docs/Tests/print-python-version.md new file mode 100644 index 0000000000..09db440129 --- /dev/null +++ b/docs/Tests/print-python-version.md @@ -0,0 +1,121 @@ +Automatically generated README for this automation recipe: **print-python-version** + +Category: **Tests** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=print-python-version,d3a538fa4abb464b) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-python-version)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *print,python,version,python-version* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "print python version python-version" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=print,python,version,python-version` + +`cm run script --tags=print,python,version,python-version ` + +*or* + +`cmr "print python version python-version"` + +`cmr "print python version python-version " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'print,python,version,python-version' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="print,python,version,python-version"``` + +#### Run this script via Docker (beta) + +`cm docker script "print python version python-version" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-python-version/_cm.json)*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-python-version/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-python-version/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-python-version/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-python-version/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-python-version/_cm.json) + +___ +### Script output +`cmr "print python version python-version " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Tests/run-python.md b/docs/Tests/run-python.md new file mode 100644 index 0000000000..7ab1b2aec5 --- /dev/null +++ b/docs/Tests/run-python.md @@ -0,0 +1,138 @@ +Automatically generated README for this automation recipe: **run-python** + +Category: **Tests** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=run-python,75a46d84ee6f49b0) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub 
directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-python)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *run,python* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "run python" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=run,python` + +`cm run script --tags=run,python [--input_flags]` + +*or* + +`cmr "run python"` + +`cmr "run python " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'run,python' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="run,python"``` + +#### Run this script via Docker (beta) + +`cm docker script "run python" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--command=value` → `CM_RUN_PYTHON_CMD=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "command":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-python/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-python/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-python/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-python/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-python/_cm.json) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-python/_cm.json) + +___ +### Script output +`cmr "run python " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Tests/test-deps-conditions.md b/docs/Tests/test-deps-conditions.md new file mode 100644 index 0000000000..4c0ee33ed8 --- /dev/null +++ b/docs/Tests/test-deps-conditions.md @@ -0,0 +1,151 @@ +Automatically generated README for this automation recipe: **test-deps-conditions** + +Category: **Tests** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=test-deps-conditions,5cb82aee472640df) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-deps-conditions)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *test,deps,conditions* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "test deps conditions" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=test,deps,conditions` + +`cm run script --tags=test,deps,conditions [--input_flags]` + +*or* + +`cmr "test deps conditions"` + +`cmr "test deps conditions " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'test,deps,conditions' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="test,deps,conditions"``` + +#### Run this script via Docker (beta) + +`cm docker script "test deps conditions" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--test1=value` → `CM_ENV1=value` +* `--test2=value` → `CM_ENV2=value` +* `--test3=value` → `CM_ENV3=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "test1":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+
+___
+### Dependencies on other CM scripts
+
+
+  1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-deps-conditions/_cm.yaml)***
+     * print,native,hello-world,_skip_print_env
+       - CM script: [print-hello-world](https://github.com/mlcommons/cm4mlops/tree/master/script/print-hello-world)
+     * print,native,hello-world,_skip_print_env,_text.SKIP_IF_ALL_ENV
+       * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_ENV1': [True], 'CM_ENV2': [True], 'CM_ENV3': [True]}`
+       - CM script: [print-hello-world](https://github.com/mlcommons/cm4mlops/tree/master/script/print-hello-world)
+     * print,native,hello-world,_skip_print_env,_text.SKIP_IF_ANY_ENV
+       * Skip this dependency only if any of ENV vars are set:<br>
+`{'CM_ENV1': [True], 'CM_ENV2': [True], 'CM_ENV3': [True]}` + - CM script: [print-hello-world](https://github.com/mlcommons/cm4mlops/tree/master/script/print-hello-world) + * print,native,hello-world,_skip_print_env,_text.ENABLE_IF_ALL_ENV + * Enable this dependency only if all ENV vars are set:
+`{'CM_ENV1': [True], 'CM_ENV2': [True], 'CM_ENV3': [True]}` + - CM script: [print-hello-world](https://github.com/mlcommons/cm4mlops/tree/master/script/print-hello-world) + * print,native,hello-world,_skip_print_env,_text.ENABLE_IF_ANY_ENV + * Enable this dependency only if any of ENV vars are set:
+`{'CM_ENV1': [True], 'CM_ENV2': [True], 'CM_ENV3': [True]}` + - CM script: [print-hello-world](https://github.com/mlcommons/cm4mlops/tree/master/script/print-hello-world) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-deps-conditions/_cm.yaml) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-deps-conditions/_cm.yaml) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-deps-conditions/_cm.yaml) + +___ +### Script output +`cmr "test deps conditions " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Tests/test-download-and-extract-artifacts.md b/docs/Tests/test-download-and-extract-artifacts.md new file mode 100644 index 0000000000..c0b6cf1de2 --- /dev/null +++ b/docs/Tests/test-download-and-extract-artifacts.md @@ -0,0 +1,123 @@ +Automatically generated README for this automation recipe: **test-download-and-extract-artifacts** + +Category: **Tests** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=test-download-and-extract-artifacts,51dde7580b404b27) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-download-and-extract-artifacts)* +* CM meta description for this script: 
*[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *test,download-and-extract-artifacts* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "test download-and-extract-artifacts" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=test,download-and-extract-artifacts` + +`cm run script --tags=test,download-and-extract-artifacts ` + +*or* + +`cmr "test download-and-extract-artifacts"` + +`cmr "test download-and-extract-artifacts " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'test,download-and-extract-artifacts' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="test,download-and-extract-artifacts"``` + +#### Run this script via Docker (beta) + +`cm docker script "test download-and-extract-artifacts" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-download-and-extract-artifacts/_cm.yaml)*** + * download,file,_url.https://zenodo.org/record/4735647/files/resnet50_v1.onnx + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + * download-and-extract,_extract,_url.https://zenodo.org/record/5597155/files/3dunet_kits19_128x128x128.tf.zip?download=1 + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-download-and-extract-artifacts/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-download-and-extract-artifacts/_cm.yaml) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-download-and-extract-artifacts/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-download-and-extract-artifacts/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-download-and-extract-artifacts/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-download-and-extract-artifacts/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-download-and-extract-artifacts/_cm.yaml) + +___ +### Script output +`cmr "test download-and-extract-artifacts " -j` +#### New environment keys (filter) + +* `CM_REPRODUCE_PAPER_XYZ*` +#### New environment keys auto-detected from customize diff --git a/docs/Tests/test-set-sys-user-cm.md b/docs/Tests/test-set-sys-user-cm.md new file mode 100644 index 0000000000..5edef2acb3 --- /dev/null +++ b/docs/Tests/test-set-sys-user-cm.md @@ -0,0 +1,118 @@ +Automatically generated README for this automation recipe: **test-set-sys-user-cm** + +Category: **Tests** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=test-set-sys-user-cm,25fdfcf0fe434af2) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-set-sys-user-cm)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *demo,set,sys-user,cm,sys-user-cm* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "demo set sys-user cm sys-user-cm" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=demo,set,sys-user,cm,sys-user-cm` + +`cm run script --tags=demo,set,sys-user,cm,sys-user-cm ` + +*or* + +`cmr "demo set sys-user cm sys-user-cm"` + +`cmr "demo set sys-user cm sys-user-cm " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'demo,set,sys-user,cm,sys-user-cm' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="demo,set,sys-user,cm,sys-user-cm"``` + +#### Run this script via Docker (beta) + +`cm docker script "demo set sys-user cm sys-user-cm" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_SUDO: `sudo` + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-set-sys-user-cm/_cm.json) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-set-sys-user-cm/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-set-sys-user-cm/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-set-sys-user-cm/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-set-sys-user-cm/_cm.json) + +___ +### Script output +`cmr "demo set sys-user cm sys-user-cm " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Tests/upgrade-python-pip.md b/docs/Tests/upgrade-python-pip.md new file mode 100644 index 0000000000..cacd17ff80 --- /dev/null +++ b/docs/Tests/upgrade-python-pip.md @@ -0,0 +1,123 @@ +Automatically generated README for this automation recipe: **upgrade-python-pip** + +Category: **Tests** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=upgrade-python-pip,4343ed2d9a974923) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/upgrade-python-pip)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in 
above meta description): *upgrade,python,pip,python-pip* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "upgrade python pip python-pip" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=upgrade,python,pip,python-pip` + +`cm run script --tags=upgrade,python,pip,python-pip ` + +*or* + +`cmr "upgrade python pip python-pip"` + +`cmr "upgrade python pip python-pip " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'upgrade,python,pip,python-pip' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="upgrade,python,pip,python-pip"``` + +#### Run this script via Docker (beta) + +`cm docker script "upgrade python pip python-pip" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/upgrade-python-pip/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/upgrade-python-pip/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/upgrade-python-pip/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/upgrade-python-pip/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/upgrade-python-pip/_cm.json) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/upgrade-python-pip/_cm.json) + +___ +### Script output +`cmr "upgrade python pip python-pip " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/TinyML-automation/create-fpgaconvnet-app-tinyml.md b/docs/TinyML-automation/create-fpgaconvnet-app-tinyml.md new file mode 100644 index 0000000000..c1644ada55 --- /dev/null +++ b/docs/TinyML-automation/create-fpgaconvnet-app-tinyml.md @@ -0,0 +1,156 @@ +Automatically generated README for this automation recipe: **create-fpgaconvnet-app-tinyml** + +Category: **TinyML automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=create-fpgaconvnet-app-tinyml,618f3520e98e4728) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-app-tinyml)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *create,app,fpgaconvnet* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "create app fpgaconvnet" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=create,app,fpgaconvnet` + +`cm run script --tags=create,app,fpgaconvnet[,variations] ` + +*or* + +`cmr "create app fpgaconvnet"` + +`cmr "create app fpgaconvnet [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'create,app,fpgaconvnet' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="create,app,fpgaconvnet"``` + +#### Run this script via Docker (beta) + +`cm docker script "create app fpgaconvnet[variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**benchmark**" +
+ Click here to expand this section. + + * **`_ic`** (default) + - Workflow: + +
+ + + * Group "**board**" +
+ Click here to expand this section. + + * **`_zc706`** (default) + - Environment variables: + - *CM_TINY_BOARD*: `zc706` + - Workflow: + +
+ + +#### Default variations + +`_ic,_zc706` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-app-tinyml/_cm.json)*** + * create,fpgaconvnet,config + * CM names: `--adr.['config-generator']...` + - CM script: [create-fpgaconvnet-config-tinyml](https://github.com/mlcommons/cm4mlops/tree/master/script/create-fpgaconvnet-config-tinyml) + * get,xilinx,sdk + * CM names: `--adr.['xilinx-sdk']...` + - CM script: [get-xilinx-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-xilinx-sdk) + * get,tensorflow + * CM names: `--adr.['tensorflow']...` + - CM script: [install-tensorflow-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-tensorflow-from-src) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-app-tinyml/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-app-tinyml/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-app-tinyml/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-app-tinyml/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-app-tinyml/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-app-tinyml/_cm.json) + +___ +### Script output +`cmr "create app fpgaconvnet [,variations]" -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/TinyML-automation/create-fpgaconvnet-config-tinyml.md b/docs/TinyML-automation/create-fpgaconvnet-config-tinyml.md new file mode 100644 index 0000000000..4f6b5eb62c --- /dev/null +++ b/docs/TinyML-automation/create-fpgaconvnet-config-tinyml.md @@ -0,0 +1,173 @@ +Automatically generated README for this automation recipe: **create-fpgaconvnet-config-tinyml** + +Category: **TinyML automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=create-fpgaconvnet-config-tinyml,f6cdad166cfa47bc) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-config-tinyml)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *create,config,fpgaconvnet* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "create config fpgaconvnet" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=create,config,fpgaconvnet` + +`cm run script --tags=create,config,fpgaconvnet[,variations] ` + +*or* + +`cmr "create config fpgaconvnet"` + +`cmr "create config fpgaconvnet [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'create,config,fpgaconvnet' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="create,config,fpgaconvnet"``` + +#### Run this script via Docker (beta) + +`cm docker script "create config fpgaconvnet[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_zc706,ic` + - Environment variables: + - *CM_TINY_NETWORK_NAME*: `zc706-resnet` + - Workflow: + +
+ + + * Group "**benchmark**" +
+ Click here to expand this section. + + * **`_ic`** (default) + - Workflow: + +
+ + + * Group "**board**" +
+ Click here to expand this section. + + * **`_zc706`** (default) + - Environment variables: + - *CM_TINY_BOARD*: `zc706` + - Workflow: + +
+ + +#### Default variations + +`_ic,_zc706` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-config-tinyml/_cm.json)*** + * get,python3 + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,ml-model,tiny + * CM names: `--adr.['ml-model']...` + - CM script: [get-ml-model-tiny-resnet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-tiny-resnet) + * get,git,repo,_repo.https://github.com/mlcommons/submissions_tiny_v1.1 + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-config-tinyml/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-config-tinyml/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-config-tinyml/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-config-tinyml/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-config-tinyml/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-config-tinyml/_cm.json) + +___ +### Script output +`cmr "create config fpgaconvnet [,variations]" -j` +#### New environment keys (filter) + +* `CM_TINY_FPGACONVNET*` +#### New environment keys auto-detected from customize + +* `CM_TINY_FPGACONVNET_' + network_env_name + '_CODE_PATH` +* `CM_TINY_FPGACONVNET_' + network_env_name + '_RUN_DIR` +* `CM_TINY_FPGACONVNET_CONFIG_FILE_' + network_env_name + '_PATH` +* `CM_TINY_FPGACONVNET_NETWORK_ENV_NAME` +* `CM_TINY_FPGACONVNET_NETWORK_NAME` \ No newline at end of file diff --git a/docs/TinyML-automation/flash-tinyml-binary.md b/docs/TinyML-automation/flash-tinyml-binary.md new file mode 100644 index 0000000000..8d41da6b94 --- /dev/null +++ b/docs/TinyML-automation/flash-tinyml-binary.md @@ -0,0 +1,175 @@ +Automatically generated README for this automation recipe: **flash-tinyml-binary** + +Category: **TinyML automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=flash-tinyml-binary,98913babb43f4fcb) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/flash-tinyml-binary)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *flash,tiny,mlperf,mlcommons* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "flash tiny mlperf mlcommons" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=flash,tiny,mlperf,mlcommons` + +`cm run script --tags=flash,tiny,mlperf,mlcommons[,variations] [--input_flags]` + +*or* + +`cmr "flash tiny mlperf mlcommons"` + +`cmr "flash tiny mlperf mlcommons [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'flash,tiny,mlperf,mlcommons' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="flash,tiny,mlperf,mlcommons"``` + +#### Run this script via Docker (beta) + +`cm docker script "flash tiny mlperf mlcommons[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_NRF` + - Workflow: + * `_NUCLEO` + - Workflow: + * `_ad` + - Workflow: + * `_cmsis_nn` + - Workflow: + * `_ic` + - Workflow: + * `_kws` + - Workflow: + * `_native` + - Workflow: + * `_vww` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--build_dir=value` → `CM_TINY_BUILD_DIR=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "build_dir":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+
+#### Versions
+Default version: `r1.0`
+
+___
+### Dependencies on other CM scripts
+
+
+  1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/flash-tinyml-binary/_cm.json)***
+     * detect,os
+       - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os)
+     * get,zephyr
+       * CM names: `--adr.['zephyr']...`
+       - CM script: [get-zephyr](https://github.com/mlcommons/cm4mlops/tree/master/script/get-zephyr)
+     * get,zephyr-sdk
+       * CM names: `--adr.['zephyr-sdk']...`
+       - CM script: [get-zephyr-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-zephyr-sdk)
+     * reproduce,tiny,mlperf
+       * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_TINY_BUILD_DIR': ['on']}` + - CM script: [reproduce-mlperf-octoml-tinyml-results](https://github.com/mlcommons/cm4mlops/tree/master/script/reproduce-mlperf-octoml-tinyml-results) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/flash-tinyml-binary/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/flash-tinyml-binary/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/flash-tinyml-binary/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/flash-tinyml-binary/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/flash-tinyml-binary/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/flash-tinyml-binary/_cm.json) + +___ +### Script output +`cmr "flash tiny mlperf mlcommons [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/TinyML-automation/get-microtvm.md b/docs/TinyML-automation/get-microtvm.md new file mode 100644 index 0000000000..54ad7bfa46 --- /dev/null +++ b/docs/TinyML-automation/get-microtvm.md @@ -0,0 +1,162 @@ +Automatically generated README for this automation recipe: **get-microtvm** + +Category: **TinyML automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-microtvm,a9cad70972a140b9) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-microtvm)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,src,source,microtvm,tiny* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get src source microtvm tiny" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,src,source,microtvm,tiny` + +`cm run script --tags=get,src,source,microtvm,tiny[,variations] [--input_flags]` + +*or* + +`cmr "get src source microtvm tiny"` + +`cmr "get src source microtvm tiny [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,src,source,microtvm,tiny', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,src,source,microtvm,tiny"``` + +#### Run this script via Docker (beta) + +`cm docker script "get src source microtvm tiny[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_full-history` + - Environment variables: + - *CM_GIT_DEPTH*: `--depth 10` + - Workflow: + * `_short-history` + - Environment variables: + - *CM_GIT_DEPTH*: `--depth 10` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--ssh=value` → `CM_GIT_SSH=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "ssh":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `main` + +* `custom` +* `main` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-microtvm/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-microtvm/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-microtvm/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-microtvm/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-microtvm/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-microtvm/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-microtvm/_cm.json) + +___ +### Script output +`cmr "get src source microtvm tiny [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_MICROTVM_*` +#### New environment keys auto-detected from customize + +* `CM_MICROTVM_SOURCE` \ No newline at end of file diff --git a/docs/TinyML-automation/get-zephyr-sdk.md b/docs/TinyML-automation/get-zephyr-sdk.md new file mode 100644 index 0000000000..07c2df73b5 --- /dev/null +++ b/docs/TinyML-automation/get-zephyr-sdk.md @@ -0,0 +1,126 @@ +Automatically generated README for this automation recipe: **get-zephyr-sdk** + +Category: **TinyML automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-zephyr-sdk,c70ae1a7567f4a7b) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr-sdk)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,zephyr-sdk* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get zephyr-sdk" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,zephyr-sdk` + +`cm run script --tags=get,zephyr-sdk ` + +*or* + +`cmr "get zephyr-sdk"` + +`cmr "get zephyr-sdk " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,zephyr-sdk', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,zephyr-sdk"``` + +#### Run this script via Docker (beta) + +`cm docker script "get zephyr-sdk" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `0.13.2` + +* `0.13.1` +* `0.13.2` +* `0.15.0` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr-sdk/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr-sdk/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr-sdk/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr-sdk/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr-sdk/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr-sdk/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr-sdk/_cm.json) + +___ +### Script output +`cmr "get zephyr-sdk " -j` +#### New environment keys (filter) + +* `ZEPHYR_*` +#### New environment keys auto-detected from customize diff --git a/docs/TinyML-automation/get-zephyr.md b/docs/TinyML-automation/get-zephyr.md new file mode 100644 index 0000000000..e36cd805c6 --- /dev/null +++ b/docs/TinyML-automation/get-zephyr.md @@ -0,0 +1,132 @@ +Automatically generated README for this automation recipe: **get-zephyr** + +Category: **TinyML automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-zephyr,d4105c2cdb044276) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,zephyr* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get zephyr" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,zephyr` + +`cm run script --tags=get,zephyr ` + +*or* + +`cmr "get zephyr"` + +`cmr "get zephyr " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,zephyr', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,zephyr"``` + +#### Run this script via Docker (beta) + +`cm docker script "get zephyr" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `v2.7` + +* `v2.7` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr/_cm.json)*** + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,cmake + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + * get,generic-python-lib,_west + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr/_cm.json) + 1. ***Run native script if exists*** + * [run-ubuntu.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr/run-ubuntu.sh) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr/_cm.json) + +___ +### Script output +`cmr "get zephyr " -j` +#### New environment keys (filter) + +* `CM_ZEPHYR_*` +#### New environment keys auto-detected from customize + +* `CM_ZEPHYR_DIR` \ No newline at end of file diff --git a/docs/img/logo_v2.svg b/docs/img/logo_v2.svg new file mode 100644 index 0000000000..fb655c6278 --- /dev/null +++ b/docs/img/logo_v2.svg @@ -0,0 +1,6 @@ + + + MLCommons + + + diff --git a/docs/img/pages (80).png b/docs/img/pages (80).png new file mode 100644 index 0000000000..0ca65735a7 Binary files /dev/null and b/docs/img/pages (80).png differ diff --git a/docs/index.md b/docs/index.md new file mode 100644 index 0000000000..32d46ee883 --- /dev/null +++ b/docs/index.md @@ -0,0 +1 @@ +../README.md \ No newline at end of file diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 0000000000..39fab4e1ff --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1,4 @@ +mkdocs-material +swagger-markdown +mkdocs-macros-plugin +ruamel.yaml diff --git a/mkdocs.yml b/mkdocs.yml new file mode 100644 index 0000000000..c4b7ea5503 --- /dev/null +++ b/mkdocs.yml @@ -0,0 +1,327 @@ +site_name: MLPerf Inference Documentation +repo_url: https://github.com/mlcommons/cm4mlops +theme: + name: material + logo: img/logo_v2.svg + favicon: img/logo_v2.svg + palette: + primary: deep purple + accent: green + features: + - content.tabs.link + - content.code.copy + - navigation.expand + - navigation.sections + - navigation.indexes + - navigation.instant + - navigation.tabs + - navigation.tabs.sticky + - navigation.top + - toc.follow +nav: + - CM Scripts: + - index.md + - Python automation: + - activate-python-venv: docs\Python-automation\activate-python-venv.md + - get-generic-python-lib: docs\Python-automation\get-generic-python-lib.md + - get-python3: docs\Python-automation\get-python3.md + - install-generic-conda-package: 
Python-automation/install-generic-conda-package.md + - install-python-src: Python-automation/install-python-src.md + - install-python-venv: Python-automation/install-python-venv.md + - MLPerf benchmark support: + - add-custom-nvidia-system: MLPerf-benchmark-support/add-custom-nvidia-system.md + - benchmark-any-mlperf-inference-implementation: MLPerf-benchmark-support/benchmark-any-mlperf-inference-implementation.md + - build-mlperf-inference-server-nvidia: MLPerf-benchmark-support/build-mlperf-inference-server-nvidia.md + - generate-mlperf-inference-submission: MLPerf-benchmark-support/generate-mlperf-inference-submission.md + - generate-mlperf-inference-user-conf: MLPerf-benchmark-support/generate-mlperf-inference-user-conf.md + - generate-mlperf-tiny-report: MLPerf-benchmark-support/generate-mlperf-tiny-report.md + - generate-mlperf-tiny-submission: MLPerf-benchmark-support/generate-mlperf-tiny-submission.md + - generate-nvidia-engine: MLPerf-benchmark-support/generate-nvidia-engine.md + - get-mlperf-inference-intel-scratch-space: MLPerf-benchmark-support/get-mlperf-inference-intel-scratch-space.md + - get-mlperf-inference-loadgen: MLPerf-benchmark-support/get-mlperf-inference-loadgen.md + - get-mlperf-inference-nvidia-common-code: MLPerf-benchmark-support/get-mlperf-inference-nvidia-common-code.md + - get-mlperf-inference-nvidia-scratch-space: MLPerf-benchmark-support/get-mlperf-inference-nvidia-scratch-space.md + - get-mlperf-inference-results: MLPerf-benchmark-support/get-mlperf-inference-results.md + - get-mlperf-inference-results-dir: MLPerf-benchmark-support/get-mlperf-inference-results-dir.md + - get-mlperf-inference-src: MLPerf-benchmark-support/get-mlperf-inference-src.md + - get-mlperf-inference-submission-dir: MLPerf-benchmark-support/get-mlperf-inference-submission-dir.md + - get-mlperf-inference-sut-configs: 
docs\MLPerf-benchmark-support\get-mlperf-inference-sut-configs.md + - get-mlperf-inference-sut-description: docs\MLPerf-benchmark-support\get-mlperf-inference-sut-description.md + - get-mlperf-logging: docs\MLPerf-benchmark-support\get-mlperf-logging.md + - get-mlperf-power-dev: docs\MLPerf-benchmark-support\get-mlperf-power-dev.md + - get-mlperf-tiny-eembc-energy-runner-src: docs\MLPerf-benchmark-support\get-mlperf-tiny-eembc-energy-runner-src.md + - get-mlperf-tiny-src: docs\MLPerf-benchmark-support\get-mlperf-tiny-src.md + - get-mlperf-training-nvidia-code: docs\MLPerf-benchmark-support\get-mlperf-training-nvidia-code.md + - get-mlperf-training-src: docs\MLPerf-benchmark-support\get-mlperf-training-src.md + - get-nvidia-mitten: docs\MLPerf-benchmark-support\get-nvidia-mitten.md + - get-spec-ptd: docs\MLPerf-benchmark-support\get-spec-ptd.md + - import-mlperf-inference-to-experiment: docs\MLPerf-benchmark-support\import-mlperf-inference-to-experiment.md + - import-mlperf-tiny-to-experiment: docs\MLPerf-benchmark-support\import-mlperf-tiny-to-experiment.md + - import-mlperf-training-to-experiment: docs\MLPerf-benchmark-support\import-mlperf-training-to-experiment.md + - install-mlperf-logging-from-src: docs\MLPerf-benchmark-support\install-mlperf-logging-from-src.md + - prepare-training-data-bert: docs\MLPerf-benchmark-support\prepare-training-data-bert.md + - prepare-training-data-resnet: docs\MLPerf-benchmark-support\prepare-training-data-resnet.md + - preprocess-mlperf-inference-submission: docs\MLPerf-benchmark-support\preprocess-mlperf-inference-submission.md + - process-mlperf-accuracy: docs\MLPerf-benchmark-support\process-mlperf-accuracy.md + - push-mlperf-inference-results-to-github: docs\MLPerf-benchmark-support\push-mlperf-inference-results-to-github.md + - run-all-mlperf-models: docs\MLPerf-benchmark-support\run-all-mlperf-models.md + - run-mlperf-inference-mobilenet-models: docs\MLPerf-benchmark-support\run-mlperf-inference-mobilenet-models.md + - 
run-mlperf-inference-submission-checker: docs\MLPerf-benchmark-support\run-mlperf-inference-submission-checker.md + - run-mlperf-power-client: docs\MLPerf-benchmark-support\run-mlperf-power-client.md + - run-mlperf-power-server: docs\MLPerf-benchmark-support\run-mlperf-power-server.md + - run-mlperf-training-submission-checker: docs\MLPerf-benchmark-support\run-mlperf-training-submission-checker.md + - truncate-mlperf-inference-accuracy-log: docs\MLPerf-benchmark-support\truncate-mlperf-inference-accuracy-log.md + - Modular AI-ML application pipeline: + - app-image-classification-onnx-py: docs\Modular-AI/ML-application-pipeline\app-image-classification-onnx-py.md + - app-image-classification-tf-onnx-cpp: docs\Modular-AI/ML-application-pipeline\app-image-classification-tf-onnx-cpp.md + - app-image-classification-torch-py: docs\Modular-AI/ML-application-pipeline\app-image-classification-torch-py.md + - app-image-classification-tvm-onnx-py: docs\Modular-AI/ML-application-pipeline\app-image-classification-tvm-onnx-py.md + - app-stable-diffusion-onnx-py: docs\Modular-AI/ML-application-pipeline\app-stable-diffusion-onnx-py.md + - Modular application pipeline: + - app-image-corner-detection: docs\Modular-application-pipeline\app-image-corner-detection.md + - Modular MLPerf inference benchmark pipeline: + - app-loadgen-generic-python: docs\Modular-MLPerf-inference-benchmark-pipeline\app-loadgen-generic-python.md + - app-mlperf-inference: docs\Modular-MLPerf-inference-benchmark-pipeline\app-mlperf-inference.md + - app-mlperf-inference-ctuning-cpp-tflite: docs\Modular-MLPerf-inference-benchmark-pipeline\app-mlperf-inference-ctuning-cpp-tflite.md + - app-mlperf-inference-mlcommons-cpp: docs\Modular-MLPerf-inference-benchmark-pipeline\app-mlperf-inference-mlcommons-cpp.md + - app-mlperf-inference-mlcommons-python: docs\Modular-MLPerf-inference-benchmark-pipeline\app-mlperf-inference-mlcommons-python.md + - benchmark-program-mlperf: 
docs\Modular-MLPerf-inference-benchmark-pipeline\benchmark-program-mlperf.md + - run-mlperf-inference-app: docs\Modular-MLPerf-inference-benchmark-pipeline\run-mlperf-inference-app.md + - Modular MLPerf benchmarks: + - app-mlperf-inference-dummy: docs\Modular-MLPerf-benchmarks\app-mlperf-inference-dummy.md + - app-mlperf-inference-intel: docs\Modular-MLPerf-benchmarks\app-mlperf-inference-intel.md + - app-mlperf-inference-qualcomm: docs\Modular-MLPerf-benchmarks\app-mlperf-inference-qualcomm.md + - Reproduce MLPerf benchmarks: + - app-mlperf-inference-nvidia: docs\Reproduce-MLPerf-benchmarks\app-mlperf-inference-nvidia.md + - reproduce-mlperf-octoml-tinyml-results: docs\Reproduce-MLPerf-benchmarks\reproduce-mlperf-octoml-tinyml-results.md + - reproduce-mlperf-training-nvidia: docs\Reproduce-MLPerf-benchmarks\reproduce-mlperf-training-nvidia.md + - wrapper-reproduce-octoml-tinyml-submission: docs\Reproduce-MLPerf-benchmarks\wrapper-reproduce-octoml-tinyml-submission.md + - Modular MLPerf training benchmark pipeline: + - app-mlperf-training-nvidia: docs\Modular-MLPerf-training-benchmark-pipeline\app-mlperf-training-nvidia.md + - app-mlperf-training-reference: docs\Modular-MLPerf-training-benchmark-pipeline\app-mlperf-training-reference.md + - DevOps automation: + - benchmark-program: docs\DevOps-automation\benchmark-program.md + - compile-program: docs\DevOps-automation\compile-program.md + - convert-csv-to-md: docs\DevOps-automation\convert-csv-to-md.md + - copy-to-clipboard: docs\DevOps-automation\copy-to-clipboard.md + - create-conda-env: docs\DevOps-automation\create-conda-env.md + - create-patch: docs\DevOps-automation\create-patch.md + - detect-sudo: docs\DevOps-automation\detect-sudo.md + - download-and-extract: docs\DevOps-automation\download-and-extract.md + - download-file: docs\DevOps-automation\download-file.md + - download-torrent: docs\DevOps-automation\download-torrent.md + - extract-file: docs\DevOps-automation\extract-file.md + - fail: 
docs\DevOps-automation\fail.md + - get-conda: docs\DevOps-automation\get-conda.md + - get-git-repo: docs\DevOps-automation\get-git-repo.md + - get-github-cli: docs\DevOps-automation\get-github-cli.md + - pull-git-repo: docs\DevOps-automation\pull-git-repo.md + - push-csv-to-spreadsheet: docs\DevOps-automation\push-csv-to-spreadsheet.md + - set-device-settings-qaic: docs\DevOps-automation\set-device-settings-qaic.md + - set-echo-off-win: docs\DevOps-automation\set-echo-off-win.md + - set-performance-mode: docs\DevOps-automation\set-performance-mode.md + - set-sqlite-dir: docs\DevOps-automation\set-sqlite-dir.md + - tar-my-folder: docs\DevOps-automation\tar-my-folder.md + - Docker automation: + - build-docker-image: docs\Docker-automation\build-docker-image.md + - build-dockerfile: docs\Docker-automation\build-dockerfile.md + - prune-docker: docs\Docker-automation\prune-docker.md + - run-docker-container: docs\Docker-automation\run-docker-container.md + - AI-ML optimization: + - calibrate-model-for.qaic: docs\AI/ML-optimization\calibrate-model-for.qaic.md + - compile-model-for.qaic: docs\AI/ML-optimization\compile-model-for.qaic.md + - prune-bert-models: docs\AI/ML-optimization\prune-bert-models.md + - AI-ML models: + - convert-ml-model-huggingface-to-onnx: docs\AI/ML-models\convert-ml-model-huggingface-to-onnx.md + - get-bert-squad-vocab: docs\AI/ML-models\get-bert-squad-vocab.md + - get-dlrm: docs\AI/ML-models\get-dlrm.md + - get-ml-model-3d-unet-kits19: docs\AI/ML-models\get-ml-model-3d-unet-kits19.md + - get-ml-model-bert-base-squad: docs\AI/ML-models\get-ml-model-bert-base-squad.md + - get-ml-model-bert-large-squad: docs\AI/ML-models\get-ml-model-bert-large-squad.md + - get-ml-model-dlrm-terabyte: docs\AI/ML-models\get-ml-model-dlrm-terabyte.md + - get-ml-model-efficientnet-lite: docs\AI/ML-models\get-ml-model-efficientnet-lite.md + - get-ml-model-gptj: docs\AI/ML-models\get-ml-model-gptj.md + - get-ml-model-huggingface-zoo: 
docs\AI/ML-models\get-ml-model-huggingface-zoo.md + - get-ml-model-llama2: docs\AI/ML-models\get-ml-model-llama2.md + - get-ml-model-mobilenet: docs\AI/ML-models\get-ml-model-mobilenet.md + - get-ml-model-neuralmagic-zoo: docs\AI/ML-models\get-ml-model-neuralmagic-zoo.md + - get-ml-model-resnet50: docs\AI/ML-models\get-ml-model-resnet50.md + - get-ml-model-retinanet: docs\AI/ML-models\get-ml-model-retinanet.md + - get-ml-model-retinanet-nvidia: docs\AI/ML-models\get-ml-model-retinanet-nvidia.md + - get-ml-model-rnnt: docs\AI/ML-models\get-ml-model-rnnt.md + - get-ml-model-stable-diffusion: docs\AI/ML-models\get-ml-model-stable-diffusion.md + - get-ml-model-tiny-resnet: docs\AI/ML-models\get-ml-model-tiny-resnet.md + - get-ml-model-using-imagenet-from-model-zoo: docs\AI/ML-models\get-ml-model-using-imagenet-from-model-zoo.md + - get-tvm-model: docs\AI/ML-models\get-tvm-model.md + - CM automation: + - create-custom-cache-entry: docs\CM-automation\create-custom-cache-entry.md + - TinyML automation: + - create-fpgaconvnet-app-tinyml: docs\TinyML-automation\create-fpgaconvnet-app-tinyml.md + - create-fpgaconvnet-config-tinyml: docs\TinyML-automation\create-fpgaconvnet-config-tinyml.md + - flash-tinyml-binary: docs\TinyML-automation\flash-tinyml-binary.md + - get-microtvm: docs\TinyML-automation\get-microtvm.md + - get-zephyr: docs\TinyML-automation\get-zephyr.md + - get-zephyr-sdk: docs\TinyML-automation\get-zephyr-sdk.md + - Cloud automation: + - destroy-terraform: docs\Cloud-automation\destroy-terraform.md + - get-aws-cli: docs\Cloud-automation\get-aws-cli.md + - get-terraform: docs\Cloud-automation\get-terraform.md + - install-aws-cli: docs\Cloud-automation\install-aws-cli.md + - install-terraform-from-src: docs\Cloud-automation\install-terraform-from-src.md + - run-terraform: docs\Cloud-automation\run-terraform.md + - Platform information: + - detect-cpu: docs\Platform-information\detect-cpu.md + - detect-os: docs\Platform-information\detect-os.md + - Detection or 
installation of tools and artifacts: + - get-android-sdk: docs\Detection-or-installation-of-tools-and-artifacts\get-android-sdk.md + - get-aria2: docs\Detection-or-installation-of-tools-and-artifacts\get-aria2.md + - get-bazel: docs\Detection-or-installation-of-tools-and-artifacts\get-bazel.md + - get-blis: docs\Detection-or-installation-of-tools-and-artifacts\get-blis.md + - get-brew: docs\Detection-or-installation-of-tools-and-artifacts\get-brew.md + - get-cmake: docs\Detection-or-installation-of-tools-and-artifacts\get-cmake.md + - get-cmsis_5: docs\Detection-or-installation-of-tools-and-artifacts\get-cmsis_5.md + - get-docker: docs\Detection-or-installation-of-tools-and-artifacts\get-docker.md + - get-generic-sys-util: docs\Detection-or-installation-of-tools-and-artifacts\get-generic-sys-util.md + - get-google-test: docs\Detection-or-installation-of-tools-and-artifacts\get-google-test.md + - get-java: docs\Detection-or-installation-of-tools-and-artifacts\get-java.md + - get-javac: docs\Detection-or-installation-of-tools-and-artifacts\get-javac.md + - get-lib-armnn: docs\Detection-or-installation-of-tools-and-artifacts\get-lib-armnn.md + - get-lib-dnnl: docs\Detection-or-installation-of-tools-and-artifacts\get-lib-dnnl.md + - get-lib-protobuf: docs\Detection-or-installation-of-tools-and-artifacts\get-lib-protobuf.md + - get-lib-qaic-api: docs\Detection-or-installation-of-tools-and-artifacts\get-lib-qaic-api.md + - get-nvidia-docker: docs\Detection-or-installation-of-tools-and-artifacts\get-nvidia-docker.md + - get-openssl: docs\Detection-or-installation-of-tools-and-artifacts\get-openssl.md + - get-rclone: docs\Detection-or-installation-of-tools-and-artifacts\get-rclone.md + - get-sys-utils-cm: docs\Detection-or-installation-of-tools-and-artifacts\get-sys-utils-cm.md + - get-sys-utils-min: docs\Detection-or-installation-of-tools-and-artifacts\get-sys-utils-min.md + - get-xilinx-sdk: docs\Detection-or-installation-of-tools-and-artifacts\get-xilinx-sdk.md + - 
get-zendnn: docs\Detection-or-installation-of-tools-and-artifacts\get-zendnn.md + - install-bazel: docs\Detection-or-installation-of-tools-and-artifacts\install-bazel.md + - install-cmake-prebuilt: docs\Detection-or-installation-of-tools-and-artifacts\install-cmake-prebuilt.md + - install-gflags: docs\Detection-or-installation-of-tools-and-artifacts\install-gflags.md + - install-github-cli: docs\Detection-or-installation-of-tools-and-artifacts\install-github-cli.md + - install-numactl-from-src: docs\Detection-or-installation-of-tools-and-artifacts\install-numactl-from-src.md + - install-openssl: docs\Detection-or-installation-of-tools-and-artifacts\install-openssl.md + - Compiler automation: + - get-aocl: docs\Compiler-automation\get-aocl.md + - get-cl: docs\Compiler-automation\get-cl.md + - get-compiler-flags: docs\Compiler-automation\get-compiler-flags.md + - get-compiler-rust: docs\Compiler-automation\get-compiler-rust.md + - get-gcc: docs\Compiler-automation\get-gcc.md + - get-go: docs\Compiler-automation\get-go.md + - get-llvm: docs\Compiler-automation\get-llvm.md + - install-gcc-src: docs\Compiler-automation\install-gcc-src.md + - install-ipex-from-src: docs\Compiler-automation\install-ipex-from-src.md + - install-llvm-prebuilt: docs\Compiler-automation\install-llvm-prebuilt.md + - install-llvm-src: docs\Compiler-automation\install-llvm-src.md + - install-onednn-from-src: docs\Compiler-automation\install-onednn-from-src.md + - install-onnxruntime-from-src: docs\Compiler-automation\install-onnxruntime-from-src.md + - install-pytorch-from-src: docs\Compiler-automation\install-pytorch-from-src.md + - install-pytorch-kineto-from-src: docs\Compiler-automation\install-pytorch-kineto-from-src.md + - install-torchvision-from-src: docs\Compiler-automation\install-torchvision-from-src.md + - install-tpp-pytorch-extension: docs\Compiler-automation\install-tpp-pytorch-extension.md + - install-transformers-from-src: 
docs\Compiler-automation\install-transformers-from-src.md + - CM Interface: + - get-cache-dir: docs\CM-Interface\get-cache-dir.md + - Legacy CK support: + - get-ck: docs\Legacy-CK-support\get-ck.md + - get-ck-repo-mlops: docs\Legacy-CK-support\get-ck-repo-mlops.md + - AI-ML datasets: + - get-croissant: docs\AI/ML-datasets\get-croissant.md + - get-dataset-cifar10: docs\AI/ML-datasets\get-dataset-cifar10.md + - get-dataset-cnndm: docs\AI/ML-datasets\get-dataset-cnndm.md + - get-dataset-coco: docs\AI/ML-datasets\get-dataset-coco.md + - get-dataset-coco2014: docs\AI/ML-datasets\get-dataset-coco2014.md + - get-dataset-criteo: docs\AI/ML-datasets\get-dataset-criteo.md + - get-dataset-imagenet-aux: docs\AI/ML-datasets\get-dataset-imagenet-aux.md + - get-dataset-imagenet-calibration: docs\AI/ML-datasets\get-dataset-imagenet-calibration.md + - get-dataset-imagenet-helper: docs\AI/ML-datasets\get-dataset-imagenet-helper.md + - get-dataset-imagenet-train: docs\AI/ML-datasets\get-dataset-imagenet-train.md + - get-dataset-imagenet-val: docs\AI/ML-datasets\get-dataset-imagenet-val.md + - get-dataset-kits19: docs\AI/ML-datasets\get-dataset-kits19.md + - get-dataset-librispeech: docs\AI/ML-datasets\get-dataset-librispeech.md + - get-dataset-openimages: docs\AI/ML-datasets\get-dataset-openimages.md + - get-dataset-openimages-annotations: docs\AI/ML-datasets\get-dataset-openimages-annotations.md + - get-dataset-openimages-calibration: docs\AI/ML-datasets\get-dataset-openimages-calibration.md + - get-dataset-openorca: docs\AI/ML-datasets\get-dataset-openorca.md + - get-dataset-squad: docs\AI/ML-datasets\get-dataset-squad.md + - get-dataset-squad-vocab: docs\AI/ML-datasets\get-dataset-squad-vocab.md + - get-preprocessed-dataset-criteo: docs\AI/ML-datasets\get-preprocessed-dataset-criteo.md + - get-preprocessed-dataset-generic: docs\AI/ML-datasets\get-preprocessed-dataset-generic.md + - get-preprocessed-dataset-imagenet: docs\AI/ML-datasets\get-preprocessed-dataset-imagenet.md + - 
get-preprocessed-dataset-kits19: docs\AI-ML-datasets\get-preprocessed-dataset-kits19.md + - get-preprocessed-dataset-librispeech: docs\AI-ML-datasets\get-preprocessed-dataset-librispeech.md + - get-preprocessed-dataset-openimages: docs\AI-ML-datasets\get-preprocessed-dataset-openimages.md + - get-preprocessed-dataset-openorca: docs\AI-ML-datasets\get-preprocessed-dataset-openorca.md + - get-preprocessed-dataset-squad: docs\AI-ML-datasets\get-preprocessed-dataset-squad.md + - CUDA automation: + - get-cuda: docs\CUDA-automation\get-cuda.md + - get-cuda-devices: docs\CUDA-automation\get-cuda-devices.md + - get-cudnn: docs\CUDA-automation\get-cudnn.md + - get-tensorrt: docs\CUDA-automation\get-tensorrt.md + - install-cuda-package-manager: docs\CUDA-automation\install-cuda-package-manager.md + - install-cuda-prebuilt: docs\CUDA-automation\install-cuda-prebuilt.md + - AI-ML frameworks: + - get-google-saxml: docs\AI-ML-frameworks\get-google-saxml.md + - get-onnxruntime-prebuilt: docs\AI-ML-frameworks\get-onnxruntime-prebuilt.md + - get-qaic-apps-sdk: docs\AI-ML-frameworks\get-qaic-apps-sdk.md + - get-qaic-platform-sdk: docs\AI-ML-frameworks\get-qaic-platform-sdk.md + - get-qaic-software-kit: docs\AI-ML-frameworks\get-qaic-software-kit.md + - get-rocm: docs\AI-ML-frameworks\get-rocm.md + - get-tvm: docs\AI-ML-frameworks\get-tvm.md + - install-qaic-compute-sdk-from-src: docs\AI-ML-frameworks\install-qaic-compute-sdk-from-src.md + - install-rocm: docs\AI-ML-frameworks\install-rocm.md + - install-tensorflow-for-c: docs\AI-ML-frameworks\install-tensorflow-for-c.md + - install-tensorflow-from-src: docs\AI-ML-frameworks\install-tensorflow-from-src.md + - install-tflite-from-src: docs\AI-ML-frameworks\install-tflite-from-src.md + - Reproducibility and artifact evaluation: + - get-ipol-src: docs\Reproducibility-and-artifact-evaluation\get-ipol-src.md + - process-ae-users: docs\Reproducibility-and-artifact-evaluation\process-ae-users.md + - reproduce-ipol-paper-2022-439: 
docs\Reproducibility-and-artifact-evaluation\reproduce-ipol-paper-2022-439.md + - reproduce-micro-paper-2023-victima: docs\Reproducibility-and-artifact-evaluation\reproduce-micro-paper-2023-victima.md + - GUI: + - gui: docs\GUI\gui.md + - Collective benchmarking: + - launch-benchmark: docs\Collective-benchmarking\launch-benchmark.md + - Tests: + - print-any-text: docs\Tests\print-any-text.md + - print-croissant-desc: docs\Tests\print-croissant-desc.md + - print-hello-world: docs\Tests\print-hello-world.md + - print-hello-world-java: docs\Tests\print-hello-world-java.md + - print-hello-world-javac: docs\Tests\print-hello-world-javac.md + - print-hello-world-py: docs\Tests\print-hello-world-py.md + - print-python-version: docs\Tests\print-python-version.md + - run-python: docs\Tests\run-python.md + - test-cm-core: docs\Tests\test-cm-core.md + - test-cm-script-pipeline: docs\Tests\test-cm-script-pipeline.md + - test-deps-conditions: docs\Tests\test-deps-conditions.md + - test-deps-conditions2: docs\Tests\test-deps-conditions2.md + - test-download-and-extract-artifacts: docs\Tests\test-download-and-extract-artifacts.md + - test-set-sys-user-cm: docs\Tests\test-set-sys-user-cm.md + - upgrade-python-pip: docs\Tests\upgrade-python-pip.md + - Dashboard automation: + - publish-results-to-dashboard: docs\Dashboard-automation\publish-results-to-dashboard.md + - Remote automation: + - remote-run-commands: docs\Remote-automation\remote-run-commands.md + - CM interface prototyping: + - test-debug: docs\CM-interface-prototyping\test-debug.md + - test-mlperf-inference-retinanet: docs\CM-interface-prototyping\test-mlperf-inference-retinanet.md + +markdown_extensions: + - pymdownx.tasklist: + custom_checkbox: true + - pymdownx.details + - admonition + - attr_list + - def_list + - footnotes + - pymdownx.superfences: + custom_fences: + - name: mermaid + class: mermaid + format: !!python/name:pymdownx.superfences.fence_code_format + - pymdownx.tabbed: + alternate_style: true +plugins: 
"""mkdocsHelper.py

Builds the per-category documentation tree for the mkdocs site:

* scans ``script/*/_cm.json`` / ``_cm.yaml`` for each automation recipe's
  ``category`` field,
* copies every script's ``README.md`` into ``docs/<sanitised-category>/``,
* prints the matching ``nav`` entries to paste into ``mkdocs.yml``.

NOTE(review): this module was recovered from a mangled diff chunk; the chunk
also carried the tail of mkdocs.yml's plugin list (``plugins:`` /
``- search`` / ``- macros``) — that YAML belongs in mkdocs.yml, not here.
"""

import os
import json
import shutil


def get_category_from_file(file_path):
    """Return the 'category' value of a CM metadata file, or None.

    Supports ``.json`` and ``.yaml``/``.yml``; any other extension, a read
    or parse failure, or missing/empty metadata yields None.
    """
    try:
        with open(file_path, 'r') as file:
            if file_path.endswith('.json'):
                data = json.load(file)
            elif file_path.endswith(('.yaml', '.yml')):
                # PyYAML is only needed for YAML metadata; importing it
                # lazily keeps the JSON path usable without the package.
                import yaml
                data = yaml.safe_load(file)
            else:
                return None
        # An empty YAML file parses to None; guard before calling .get().
        return data.get('category') if isinstance(data, dict) else None
    except Exception as e:
        # Best effort: one broken metadata file must not abort the scan.
        print(f"Error reading {file_path}: {e}")
        return None


def scan_folders(parent_folder):
    """Group script folder names under ``<parent_folder>/script`` by category.

    Returns a dict mapping category name -> list of script folder names.
    Folders without readable metadata or without a category are skipped.
    ``_cm.json`` is preferred over ``_cm.yaml`` when both exist, matching
    the original lookup order.
    """
    category_dict = {}
    script_root = os.path.join(parent_folder, "script")
    for folder_name in os.listdir(script_root):
        folder_path = os.path.join(script_root, folder_name)
        if not os.path.isdir(folder_path):
            continue
        category = None
        for meta_name in ('_cm.json', '_cm.yaml'):
            meta_path = os.path.join(folder_path, meta_name)
            if os.path.isfile(meta_path):
                category = get_category_from_file(meta_path)
                break
        if category:
            category_dict.setdefault(category, []).append(folder_name)
    return category_dict


def _category_dir(category):
    """docs/ subdirectory for a category: '/' and ' ' both become '-'."""
    return os.path.join("docs", category.replace("/", "-").replace(" ", "-"))


def print_category_structure(category_dict):
    """Copy script READMEs into docs/<category>/ and print mkdocs nav lines.

    Fixes over the original implementation:
    * the nav pass used to overwrite its '/'-sanitised path with a merely
      ' '-sanitised one, so a category such as 'AI/ML datasets' printed
      ``docs/AI/ML-datasets/...`` while the files were actually copied to
      ``docs/AI-ML-datasets/...`` — both passes now share ``_category_dir``;
    * printed nav paths always use forward slashes, which is what mkdocs
      expects, even when ``os.path.join`` produced backslashes on Windows;
    * no f-string nests double quotes inside double quotes (a SyntaxError
      on Python < 3.12);
    * the failure message says "copy" — ``shutil.copyfile`` is used, not a
      symlink — and the leftover debug prints are removed so stdout is
      clean nav YAML.
    """
    # Pass 1: copy each script's README.md into its category directory.
    for category, folders in category_dict.items():
        category_dir = _category_dir(category)
        os.makedirs(category_dir, exist_ok=True)
        for folder in folders:
            folder_name = folder.replace("/", "-")
            source_file_path = os.path.join("script", folder_name, "README.md")
            target_path = os.path.join(category_dir, folder_name + ".md")
            if not os.path.exists(source_file_path):
                continue  # script ships no README to publish
            if os.path.exists(target_path):
                continue  # already copied by a previous run
            try:
                shutil.copyfile(source_file_path, target_path)
            except OSError as e:
                print(f"Failed to copy {source_file_path} -> {target_path}: {e}")

    # Pass 2: print the nav entries to paste into mkdocs.yml.
    print(" - CM Scripts:")
    for category, folders in category_dict.items():
        # The displayed key keeps spaces, as the original did ('AI-ML datasets').
        category_name = category.replace("/", "-")
        nav_dir = _category_dir(category).replace(os.sep, "/")
        print(f"    - {category_name}:")
        for folder in folders:
            folder_name = folder.replace("/", "-")
            print(f"        - {folder_name}: {nav_dir}/{folder_name}.md")


if __name__ == "__main__":
    # An empty string scans ./script relative to the current directory, so
    # run this from the repository root (or fill in an absolute path).
    parent_folder = r""
    category_dict = scan_folders(parent_folder)
    print_category_structure(category_dict)