# Dockerfile.trtis
  1. ARG TRTIS_IMAGE=nvcr.io/nvidia/tensorrtserver:20.02-py3
  2. FROM ${TRTIS_IMAGE}
  3. RUN mkdir -p /workspace/trt-tacotron2-waveglow
  4. WORKDIR /workspace/trt-tacotron2-waveglow
  5. # Download custom backend SDK
  6. RUN wget https://github.com/NVIDIA/tensorrt-inference-server/releases/download/v1.11.0/v1.11.0_ubuntu1804.custombackend.tar.gz
  7. RUN tar xf v1.11.0_ubuntu1804.custombackend.tar.gz && mv custom-backend-sdk ./trtis_sdk
  8. # install cmake
  9. RUN apt-get update && apt-get install -qy cmake && apt-get clean
  10. # build the source code
  11. ADD src/ "./src"
  12. ADD CMakeLists.txt "./"
  13. ADD configure "./"
  14. RUN ./configure --trtis
  15. RUN make
  16. ARG TACOTRON2_MODEL="tacotron.json"
  17. ARG WAVEGLOW_MODEL="waveglow.onnx"
  18. ARG DENOISER_MODEL="denoiser.json"
  19. RUN mkdir -p "/models" "/engines"
  20. ADD "${TACOTRON2_MODEL}" /models/
  21. ADD "${WAVEGLOW_MODEL}" /models/
  22. ADD "${DENOISER_MODEL}" /models/
  23. ADD model-config/tacotron2waveglow /models/tacotron2waveglow
  24. RUN mkdir -p /models/tacotron2waveglow/1
  25. RUN cp -v "./build/lib/libtt2i_trtis.so" /models/tacotron2waveglow/1/
  26. ADD scripts "./scripts"