WareHouseHD

Detectron2 Dockerfile cuda124

Oct 9th, 2024 (edited)
FROM pytorch/pytorch:2.4.1-cuda12.4-cudnn9-devel

RUN apt-get update && apt-get install -y \
    git \
    wget \
    ninja-build \
    vim \
    sudo

RUN wget https://bootstrap.pypa.io/pip/get-pip.py
RUN python3 get-pip.py --break-system-packages --root-user-action=ignore
RUN rm get-pip.py

RUN pip install --upgrade pip --root-user-action=ignore
RUN pip install Flask --root-user-action=ignore
RUN pip install pybind11 --root-user-action=ignore
RUN pip install ninja --root-user-action=ignore
RUN pip install tensorboard --root-user-action=ignore
RUN pip install opencv-python-headless --root-user-action=ignore
RUN pip install 'git+https://github.com/facebookresearch/fvcore' --root-user-action=ignore

# create a non-root user
RUN useradd -m --no-log-init --system --uid 991 tron2 -g sudo
RUN echo '%sudo ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers
USER tron2
WORKDIR /home/tron2
ENV PATH="/home/tron2/.local/bin:/usr/local/cuda/bin:/opt/conda/bin:/usr/local/nvidia/bin:${PATH}"

RUN python -m pip install --upgrade setuptools wheel --root-user-action=ignore

# install detectron2
RUN git clone https://github.com/facebookresearch/detectron2 detectron2_repo

# set FORCE_CUDA because during `docker build` cuda is not accessible
ENV FORCE_CUDA="1"

# This will by default build detectron2 for all common cuda architectures and take a lot more time,
# because inside `docker build`, there is no way to tell which architecture will be used.
ARG TORCH_CUDA_ARCH_LIST="Maxwell;Maxwell+Tegra;Pascal;Volta;Turing"
ENV TORCH_CUDA_ARCH_LIST="${TORCH_CUDA_ARCH_LIST}"

# Set the CUDA_HOME environment variable
ENV CUDA_HOME=/usr/local/cuda
ENV PATH=$CUDA_HOME/bin:$PATH
ENV LD_LIBRARY_PATH=$CUDA_HOME/lib64:$LD_LIBRARY_PATH

# install detectron2 that matches the current pytorch installation
RUN pip install -e detectron2_repo --root-user-action=ignore

# Set a fixed model cache directory.
ENV FVCORE_CACHE="/tmp"

# Set the working directory
WORKDIR /home/tron2/detectron2_repo

# Create the detectron_server.py file
RUN echo "\
import os\n\
import io\n\
from flask import Flask, request, send_file\n\
import cv2\n\
import numpy as np\n\
from detectron2 import model_zoo\n\
from detectron2.config import get_cfg\n\
from detectron2.engine import DefaultPredictor\n\
from detectron2.utils.visualizer import Visualizer, ColorMode\n\
from detectron2.data import MetadataCatalog\n\
\n\
# Initialize the Flask app\n\
app = Flask(__name__)\n\
\n\
# Load the Detectron2 model\n\
def setup_predictor():\n\
   cfg = get_cfg()\n\
   cfg.merge_from_file(model_zoo.get_config_file(\"COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml\"))\n\
   cfg.MODEL.WEIGHTS = model_zoo.get_checkpoint_url(\"COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml\")\n\
   cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = 0.5  # Set the threshold for this model\n\
   return DefaultPredictor(cfg), MetadataCatalog.get(cfg.DATASETS.TRAIN[0])\n\
\n\
predictor, metadata = setup_predictor()\n\
\n\
@app.route('/detect', methods=['POST'])\n\
def detect_objects():\n\
   # Check if an image is provided\n\
   if 'image' not in request.files:\n\
       return {\"error\": \"No image uploaded\"}, 400\n\
   \n\
   file = request.files['image']\n\
   if not file:\n\
       return {\"error\": \"No image uploaded\"}, 400\n\
\n\
   # Read the image in BGR format\n\
   image = np.asarray(bytearray(file.read()), dtype=np.uint8)\n\
   image = cv2.imdecode(image, cv2.IMREAD_COLOR)\n\
\n\
   if image is None:\n\
       return {\"error\": \"Invalid image\"}, 400\n\
\n\
   # Perform inference\n\
   outputs = predictor(image)\n\
   \n\
   # Visualize the predictions with color masks\n\
   v = Visualizer(image[:, :, ::-1], metadata=metadata, instance_mode=ColorMode.SEGMENTATION)\n\
   result = v.draw_instance_predictions(outputs[\"instances\"].to(\"cpu\"))\n\
   \n\
   # Convert result back to BGR format\n\
   result_image = result.get_image()[:, :, ::-1]\n\
   \n\
   # Save the result temporarily\n\
   output_path = 'output.jpg'\n\
   cv2.imwrite(output_path, result_image)\n\
\n\
   # Return the modified image\n\
   return send_file(output_path, mimetype='image/jpeg')\n\
\n\
if __name__ == '__main__':\n\
   app.run(host='0.0.0.0', port=5001)\n\
" > detectron_server.py


# Expose the port for Flask
EXPOSE 5001

# Run the server
CMD ["python3", "detectron_server.py"]
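
Note on build time: the ARG/ENV pair above compiles detectron2 for every architecture listed in TORCH_CUDA_ARCH_LIST. If you already know the GPU you will deploy on, you can pass a narrower list at build time, for example `docker build --build-arg TORCH_CUDA_ARCH_LIST="8.6" ...` for an Ampere RTX 30-series card, to shorten the build. The snippet below is a small host-side sketch (not part of the original paste) for looking up your GPU's compute capability; it assumes a CUDA-enabled PyTorch installation on the host, since no GPU is visible inside `docker build`.

# Host-side helper (sketch): print the compute capability of each visible GPU
# so TORCH_CUDA_ARCH_LIST can optionally be narrowed via --build-arg.
import torch

if torch.cuda.is_available():
    for i in range(torch.cuda.device_count()):
        major, minor = torch.cuda.get_device_capability(i)
        print(f"GPU {i}: {torch.cuda.get_device_name(i)} -> compute capability {major}.{minor}")
else:
    print("No CUDA device visible to this PyTorch installation")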
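
The embedded Flask app exposes a single POST /detect endpoint that takes a multipart upload under the form field "image" and returns the annotated JPEG. The comment below tests it with curl; the following is a minimal Python client sketch under the same assumptions: the container is published on localhost:5001 (as in the `docker run -p 5001:5001` commands below), `requests` is installed on the client machine, and `input.jpg` / `output.jpg` are placeholder paths.

# Host-side client (sketch): upload an image to the running container and save
# the annotated result returned by the /detect endpoint.
import requests

URL = "http://localhost:5001/detect"

with open("input.jpg", "rb") as f:
    # The Flask server reads the upload from the form field named "image".
    resp = requests.post(URL, files={"image": f}, timeout=120)

resp.raise_for_status()

with open("output.jpg", "wb") as out:
    out.write(resp.content)
print("Saved annotated image to output.jpg")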
Comments
  • WareHouseHD
    263 days (edited)
    # 1. Save this file as `Dockerfile` in a folder named `docker`
    # 2. Install Docker Desktop (for Windows)
    # 3. From `cmd`, run:

    cd docker
    docker build -t detectron2:v0 .
    docker run --gpus all -d --shm-size=12gb -p 5001:5001 --env="DISPLAY" --volume="/tmp/.X11-unix:/tmp/.X11-unix:rw" --name=detectron2 detectron2:v0


    # To test it from your PC, execute the command:
    curl -X POST -F "image=@path/to/your/input.jpg" http://localhost:5001/detect -o output.jpg


    # -------------------------  Cheat-sheet:  ---------------------------

    # Build:
    docker build -t detectron2:v0 .

    # Launch a new container from the built docker image (requires GPUs):
    docker run --gpus all -it --shm-size=12gb -p 5001:5001 --env="DISPLAY" --volume="/tmp/.X11-unix:/tmp/.X11-unix:rw" --name=detectron2 detectron2:v0

    # Launch the existing container:
    docker start detectron2

    # Connect to the container in interactive mode:
    docker exec -it detectron2 /bin/bash

    # Create a Linux wheel file for detectron2:
    cd /home/tron2/detectron2_repo/
    python3 setup.py bdist_wheel
    # The created file will be at:
    # /home/tron2/detectron2_repo/dist/detectron2-0.6-cp311-cp311-linux_x86_64.whl
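
A quick way to verify the built wheel on another machine is to copy it out of the container (for example with `docker cp`), install it, and run a short import check. The snippet below is a minimal sketch, not part of the original comment: it assumes the target environment already has a matching PyTorch installed and uses CPython 3.11 (the cp311 tag in the wheel name).

# Install first, e.g.: pip install detectron2-0.6-cp311-cp311-linux_x86_64.whl
import torch
import detectron2
from detectron2 import model_zoo
from detectron2.config import get_cfg

print("detectron2", detectron2.__version__,
      "| torch", torch.__version__,
      "| CUDA available:", torch.cuda.is_available())

# Loading a packaged config is a cheap check that the wheel's data files resolve.
cfg = get_cfg()
cfg.merge_from_file(model_zoo.get_config_file("COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml"))
print("Config OK:", cfg.MODEL.META_ARCHITECTURE)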