# NOTE(review): this file is a unified-diff rendering of a Dockerfile ("N-"/"N+"
# markers, "@@" hunk headers), not the raw file — code bytes left untouched below.
# Rust builder stage: cargo-chef base, bumped rust-1.71 -> rust-1.75 in this diff.
# NOTE(review): "latest-rust-1.75" is a floating tag (the "latest-" prefix moves);
# pin by digest for reproducible builds.
1- # Rust builder
2- FROM lukemathwalker/cargo-chef:latest-rust-1.71 AS chef
1+ FROM lukemathwalker/cargo-chef:latest-rust-1.75 AS chef
32WORKDIR /usr/src
43
# Sparse crates.io index protocol — faster registry fetches (the default from
# Rust 1.70 onward, so this ARG is presumably kept for older toolchains; verify).
54ARG CARGO_REGISTRIES_CRATES_IO_PROTOCOL=sparse
# (hunk header below: ~30 lines of the chef planner/builder stages are elided
# from this view — the cook/recipe steps are not visible here)
@@ -35,6 +34,7 @@ COPY router router
3534COPY launcher launcher
3635RUN cargo build --release
3736
37+
# Runtime base stage for Intel XPU.
# NOTE(review): lowercase "as base" — use "AS base" for consistency with "AS chef"
# above (BuildKit casing check).
3838# Text Generation Inference base image for Intel
3939FROM intel/intel-extension-for-pytorch:2.1.10-xpu as base
4040
# (hunk header: the elided context fetches libssl1.1 over plain http from an
# ubuntu mirror — unchecksummed download; NOTE(review) add a sha256 verification)
@@ -47,22 +47,49 @@ RUN wget http://nz2.archive.ubuntu.com/ubuntu/pool/main/o/openssl/libssl1.1_1.1.
# Register Intel's oneAPI apt repository (key dearmored into /usr/share/keyrings).
# NOTE(review): wget|gpg|tee pipeline runs under /bin/sh without pipefail (DL4006) —
# a failed wget would be masked; set SHELL ["/bin/bash","-o","pipefail","-c"] first.
4747RUN wget -O- https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB \
4848| gpg --dearmor | tee /usr/share/keyrings/oneapi-archive-keyring.gpg > /dev/null && echo "deb [signed-by=/usr/share/keyrings/oneapi-archive-keyring.gpg] https://apt.repos.intel.com/oneapi all main" | tee /etc/apt/sources.list.d/oneAPI.list
4949
# This diff adds cmake + python3-dev (needed by the pytorch/ipex source builds below).
# NOTE(review): uses "apt" instead of "apt-get" (DL3027 — apt's CLI is not script-stable);
# packages are unpinned (intel-basekit version drift breaks the hardcoded 2021.11/2024.0
# ENV paths below); and the layer never runs "rm -rf /var/lib/apt/lists/*" (DL3009),
# leaving the apt cache baked into the image.
50- RUN apt-get update && apt install -y intel-basekit xpu-smi
50+ RUN apt-get update && apt install -y intel-basekit xpu-smi cmake python3-dev
5151
# Runtime env: HF cache location, hf_transfer download accelerator, serving port.
5252# Text Generation Inference base env
5353ENV HUGGINGFACE_HUB_CACHE=/data \
5454    HF_HUB_ENABLE_HF_TRANSFER=1 \
5555    PORT=80
5656
5757
58+ WORKDIR /usr/src
# Clone sources for the from-source builds further down.
# NOTE(review): ipex is checked out from the moving branch head origin/xpu-main —
# not reproducible; pin a commit SHA like the pytorch clone below. The pytorch RUN
# applies patches out of the ipex tree, so these two RUNs are order-dependent.
59+ # Build pytorch and ipex
60+ RUN git clone https://github.com/intel/intel-extension-for-pytorch && cd intel-extension-for-pytorch && git checkout -b xpu_main origin/xpu-main
61+ RUN git clone https://github.com/pytorch/pytorch.git && cd pytorch && git checkout 209f2fa8ff86652f67d75c2f19bf9cb9942fd018 && git apply /usr/src/intel-extension-for-pytorch/torch_patches/00*.patch
62+
# Install the Python server: generate protobuf stubs, then install deps + package.
# NOTE(review): "COPY server/Makefile server/Makefile" is redundant — the preceding
# "COPY server server" already includes it (harmless, but one extra cache key).
5863# Install server
5964COPY proto proto
6065COPY server server
6166COPY server/Makefile server/Makefile
# NOTE(review): "cd server" inside RUN instead of WORKDIR (DL3003); the
# requirements_common.txt install lacks --no-cache-dir (DL3042) even though the
# editable-package install right below it has the flag — pip cache ends up in the layer.
# This diff adds the "outlines" extra (guided/grammar decoding support).
6267RUN cd server && \
6368    make gen-server && \
6469    pip install -r requirements_common.txt && \
65-     pip install ".[accelerate, peft]" --no-cache-dir
70+     pip install ".[accelerate, peft, outlines]" --no-cache-dir
71+
# Manually replicated oneAPI environment (what "source /opt/intel/oneapi/setvars.sh"
# would export) so the compilers/MKL/CCL are usable in later RUNs and at runtime.
# NOTE(review): component versions (ccl/mpi 2021.11, compiler/mkl 2024.0) are
# hardcoded but intel-basekit is installed unpinned above — a basekit bump silently
# breaks every path here. Also: MANPATH, CLASSPATH and PATH contain duplicated
# entries, and LD_LIBRARY_PATH / MANPATH end with a trailing ":" which implicitly
# adds the current directory to the search path (for LD_LIBRARY_PATH that is a
# real footgun). Consider collapsing these into one ENV instruction.
72+ ENV CCL_ROOT=/opt/intel/oneapi/ccl/2021.11
73+ ENV I_MPI_ROOT=/opt/intel/oneapi/mpi/2021.11
74+ ENV FI_PROVIDER_PATH=/opt/intel/oneapi/mpi/2021.11/opt/mpi/libfabric/lib/prov:/usr/lib/x86_64-linux-gnu/libfabric
75+ ENV DIAGUTIL_PATH=/opt/intel/oneapi/compiler/2024.0/etc/compiler/sys_check/sys_check.sh
76+ ENV CCL_CONFIGURATION=cpu_gpu_dpcpp
77+ ENV MANPATH=/opt/intel/oneapi/mpi/2021.11/share/man:/opt/intel/oneapi/mpi/2021.11/share/man:/opt/intel/oneapi/compiler/2024.0/documentation/en/man/common:
78+ ENV CMAKE_PREFIX_PATH=/opt/intel/oneapi/mkl/2024.0/lib/cmake:/opt/intel/oneapi/compiler/2024.0
79+ ENV CMPLR_ROOT=/opt/intel/oneapi/compiler/2024.0
80+ ENV LIBRARY_PATH=/opt/intel/oneapi/mpi/2021.11/lib:/opt/intel/oneapi/ccl/2021.11/lib/:/opt/intel/oneapi/mkl/2024.0/lib/:/opt/intel/oneapi/compiler/2024.0/lib
81+ ENV OCL_ICD_FILENAMES=libintelocl_emu.so:libalteracl.so:/opt/intel/oneapi/compiler/2024.0/lib/libintelocl.so
82+ ENV CLASSPATH=/opt/intel/oneapi/mpi/2021.11/share/java/mpi.jar:/opt/intel/oneapi/mpi/2021.11/share/java/mpi.jar
83+ ENV LD_LIBRARY_PATH=/opt/intel/oneapi/ccl/2021.11/lib/:/opt/intel/oneapi/mpi/2021.11/opt/mpi/libfabric/lib:/opt/intel/oneapi/mpi/2021.11/lib:/opt/intel/oneapi/mkl/2024.0/lib:/opt/intel/oneapi/compiler/2024.0/opt/compiler/lib:/opt/intel/oneapi/compiler/2024.0/lib:/opt/intel/oneapi/lib:/opt/intel/oneapi/lib/intel64:
84+ ENV MKLROOT=/opt/intel/oneapi/mkl/2024.0
85+ ENV NLSPATH=/opt/intel/oneapi/mkl/2024.0/share/locale/%l_%t/%N:/opt/intel/oneapi/compiler/2024.0/lib/locale/%l_%t/%N
86+ ENV PATH=/opt/intel/oneapi/mpi/2021.11/opt/mpi/libfabric/bin:/opt/intel/oneapi/mpi/2021.11/bin:/opt/intel/oneapi/mpi/2021.11/opt/mpi/libfabric/bin:/opt/intel/oneapi/mkl/2024.0/bin/:/opt/intel/oneapi/compiler/2024.0/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
87+ ENV CPATH=/opt/intel/oneapi/mpi/2021.11/include:/opt/intel/oneapi/ccl/2021.11/include:/opt/intel/oneapi/mkl/2024.0/include
# CCL_ZE_IPC_EXCHANGE=sockets: Level-Zero IPC handle exchange over sockets
# (presumably a container-friendly workaround vs. the default mechanism — confirm
# against oneCCL docs for the pinned version).
88+ ENV CCL_ZE_IPC_EXCHANGE=sockets
89+
90+
# Replace the base image's prebuilt torch/ipex wheels with from-source XPU builds
# (patched torch at the pinned commit + ipex from the xpu-main clone above).
# NOTE(review): "python setup.py install" is deprecated — prefer "pip install ." so
# metadata/uninstall stay consistent. Both multi-GB source trees, their submodules
# and build artifacts remain baked into these layers; doing the build in a separate
# stage and COPY-ing wheels out would shrink the image dramatically. "cd" inside
# RUN instead of WORKDIR (DL3003).
# USE_AOT_DEVLIST='pvc' presumably limits ahead-of-time kernel compilation to
# Ponte Vecchio (Max-series) GPUs — confirm intended target hardware.
91+ RUN pip uninstall -y torch && cd pytorch && git submodule update --init --recursive && python setup.py install
92+ RUN pip uninstall -y intel-extension-for-pytorch && cd intel-extension-for-pytorch && git submodule update --init --recursive && USE_AOT_DEVLIST='pvc' BUILD_SEPARATE_OPS=ON BUILD_WITH_CPU=ON USE_XETLA=ON python setup.py install
6693
# Pull the prebuilt benchmark binary from the Rust build stage.
# NOTE(review): "--from=builder" references a stage name not visible in this chunk —
# only "chef" and "base" FROM lines are shown; presumably "builder" is defined in
# the elided hunk between the chef stage and the Intel stage — verify it exists.
6794# Install benchmarker
6895COPY --from=builder /usr/src/target/release/text-generation-benchmark /usr/local/bin/text-generation-benchmark
# page-scrape residue (original text: "0 commit comments") — not part of the Dockerfile; drop when cleaning this file