# Manual: wom-ai/inference_results_v0.5

## Setup

### Docker
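To keep this v0.5 environment separate from other MLPerf inference checkouts on the same machine, the Docker image and container names are changed from `mlperf-inference` to `mlperf-inference-v0.5`. Apply the following patch to `closed/NVIDIA/Makefile`: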

```diff
diff --git a/closed/NVIDIA/Makefile b/closed/NVIDIA/Makefile
index 53dec428..7d4f1757 100644
--- a/closed/NVIDIA/Makefile
+++ b/closed/NVIDIA/Makefile
@@ -132,7 +132,7 @@ endif
 build_docker: link_dataset_dir
 ifeq ($(ARCH), x86_64)
        @echo "Building Docker image"
-       docker build -t mlperf-inference:$(DOCKER_TAG)-latest \
+       docker build -t mlperf-inference-v0.5:$(DOCKER_TAG)-latest \
                --network host -f docker/Dockerfile .
 endif
 
@@ -141,8 +141,8 @@ endif
 docker_add_user:
 ifeq ($(ARCH), x86_64)
        @echo "Adding user account into image"
-       docker build -t mlperf-inference:$(DOCKER_TAG) --network host \
-               --build-arg BASE_IMAGE=mlperf-inference:$(DOCKER_TAG)-latest \
+       docker build -t mlperf-inference-v0.5:$(DOCKER_TAG) --network host \
+               --build-arg BASE_IMAGE=mlperf-inference-v0.5:$(DOCKER_TAG)-latest \
                --build-arg GID=$(GROUPID) --build-arg UID=$(UID) --build-arg GROUP=$(GROUPNAME) --build-arg USER=$(UNAME) \
                - < docker/Dockerfile.user
 endif
@@ -155,11 +155,11 @@ ifeq ($(ARCH), x86_64)
        nvidia-docker run --rm -ti -w /work -v ${PWD}:/work -v ${HOME}:/mnt/${HOME} \
                -v /etc/timezone:/etc/timezone:ro -v /etc/localtime:/etc/localtime:ro \
                --security-opt apparmor=unconfined --security-opt seccomp=unconfined \
-               --name mlperf-inference-$(UNAME) -h mlperf-inference-$(UNAME) --add-host mlperf-inference-$(UNAME):127.0.0.1 \
+               --name mlperf-inference-v0.5-$(UNAME) -h mlperf-inference-v0.5-$(UNAME) --add-host mlperf-inference-v0.5-$(UNAME):127.0.0.1 \
                `if [ -d /home/scratch.mlperf_inference ]; then echo "-v /home/scratch.mlperf_inference:/home/scratch.mlperf_inference"; fi` \
                `if [ -d /scratch/datasets/mlperf_inference ]; then echo "-v /scratch/datasets/mlperf_inference:/scratch/datasets/mlperf_inference"; fi` \
                `if [ -d /gpfs/fs1/datasets/mlperf_inference ]; then echo "-v /gpfs/fs1/datasets/mlperf_inference:/gpfs/fs1/datasets/mlperf_inference"; fi` \
-               --user $(UID):$(GROUPID) --net host --device /dev/fuse --cap-add SYS_ADMIN mlperf-inference:$(DOCKER_TAG)
+               --user $(UID):$(GROUPID) --net host --device /dev/fuse --cap-add SYS_ADMIN mlperf-inference-v0.5:$(DOCKER_TAG)
 endif
 
 # Download COCO datasets and GNMT inference data. Imagenet does not have public links.
```
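A minimal sketch of applying the patch and rebuilding, assuming the diff above is saved as `docker_v0.5.patch` (hypothetical file name) and that the checkout lives at `inference_results_v0.5/` (hypothetical local path). The `build_docker` and `docker_add_user` targets are the ones shown in the patch itself:

```sh
# Apply the patch to the NVIDIA closed-division tree.
cd inference_results_v0.5/closed/NVIDIA   # hypothetical checkout path
git apply docker_v0.5.patch               # hypothetical patch file name

# Rebuild the base image and the per-user image; both now use the
# mlperf-inference-v0.5 name introduced by the patch.
make build_docker
make docker_add_user
```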