00. Environment Setup
FROM nvidia/cuda:10.1-cudnn7-devel-ubuntu18.04
RUN apt-get update && apt-get upgrade -y
RUN apt-get install -y apt-utils build-essential
RUN apt-get install -y python3 python3-pip openjdk-8-jre wget
RUN ln -s /usr/bin/python3 /usr/bin/python
RUN ln -s /usr/bin/pip3 /usr/bin/pip
RUN pip install --upgrade pip
WORKDIR /root/
# install dependency libraries of Tensorflow Object Detection API.
RUN apt-get install -y protobuf-compiler python3-pil python3-lxml git
RUN pip install setuptools --upgrade
RUN pip install tensorflow-gpu==2.2
RUN git clone https://github.com/tensorflow/models.git /root/tensorflow/models
WORKDIR /root/tensorflow/models/research
RUN protoc object_detection/protos/*.proto --python_out=.
# Use ENV (not RUN export) so PYTHONPATH persists into the running container.
ENV PYTHONPATH=/root/tensorflow/models/research:/root/tensorflow/models/research/slim
WORKDIR /root
RUN pip install jupyter
RUN pip install matplotlib
RUN jupyter notebook --generate-config --allow-root
RUN echo "c.NotebookApp.password = u'sha1:6a3f528eec40:6e896b6e4828f525a6e20e5411cd1c8075d68619'" >> /root/.jupyter/jupyter_notebook_config.py
EXPOSE 8888
# With the CMD below, the Jupyter notebook server daemon starts as soon as the container is created. Alternatively, append /bin/bash to the docker run command, enter the container, and run the same command manually from that directory.
CMD jupyter notebook --allow-root --ip=0.0.0.0 --port=8888 --no-browser
$ docker build -t nvidia-cuda10.1-cudnn7-tensorflow-gpu2.2-ubuntu18.04:1.0 .
$ docker run --gpus all -it --rm -p 8888:8888 -v `pwd`:/root/workspace nvidia-cuda10.1-cudnn7-tensorflow-gpu2.2-ubuntu18.04:1.0
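Once the container is up (or after entering it with /bin/bash as described in the Dockerfile comment), a quick sanity check is to confirm that the Object Detection API is importable. A minimal sketch, assuming the PYTHONPATH set in the Dockerfile above:
# Sanity check (run inside the container): the import only succeeds if the
# protos were compiled and the research/slim paths are on PYTHONPATH.
import object_detection
from object_detection.utils import label_map_util  # commonly used helper module

print("object_detection imported from:", object_detection.__file__)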
Open a web browser and go to http://localhost:8888. The password is root, as configured in the Dockerfile.
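The password hash baked into the Dockerfile (the sha1:... string above) can be regenerated for a different password. A minimal sketch using the classic notebook's passwd() helper; "root" here is only the example password used on this page:
# Generate a Jupyter notebook password hash to paste into jupyter_notebook_config.py.
# "root" is only an example; substitute your own password.
from notebook.auth import passwd

print(passwd("root"))  # prints something like 'sha1:<salt>:<hash>'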
# Check the TensorFlow version
import tensorflow as tf
print(tf.__version__)
2.2.0
# Check the graphics card and CUDA installation status
!nvidia-smi
Thu Jul 23 01:55:03 2020
+-----------------------------------------------------------------------------+
| NVIDIA-SMI 435.21       Driver Version: 435.21       CUDA Version: 10.1     |
|-------------------------------+----------------------+----------------------+
| GPU  Name        Persistence-M| Bus-Id        Disp.A | Volatile Uncorr. ECC |
| Fan  Temp  Perf  Pwr:Usage/Cap|         Memory-Usage | GPU-Util  Compute M. |
|===============================+======================+======================|
|   0  GeForce GTX 1060    Off  | 00000000:01:00.0 Off |                  N/A |
| N/A   56C    P0    26W /  N/A |    441MiB /  6078MiB |      0%      Default |
+-------------------------------+----------------------+----------------------+
+-----------------------------------------------------------------------------+
| Processes:                                                       GPU Memory |
|  GPU       PID   Type   Process name                             Usage      |
|=============================================================================|
+-----------------------------------------------------------------------------+
# Check GPU availability from TensorFlow
from tensorflow.python.client import device_lib
device_lib.list_local_devices()
incarnation: 14796649298003808976
physical_device_desc: "device: XLA_CPU device"
, name: "/device:XLA_GPU:0"
device_type: "XLA_GPU"
memory_limit: 17179869184
locality {
}
incarnation: 17109582847013019115
physical_device_desc: "device: XLA_GPU device"
, name: "/device:GPU:0"
device_type: "GPU"
memory_limit: 5475903744
locality {
  bus_id: 1
  links {
  }
}
incarnation: 2258141267897666028
physical_device_desc: "device: 0, name: GeForce GTX 1060, pci bus id: 0000:01:00.0, compute capability: 6.1"]