diff --git a/docker/1.3-1/base/Dockerfile.cpu b/docker/1.3-1/base/Dockerfile.cpu
index 6e98550b..13199ed1 100644
--- a/docker/1.3-1/base/Dockerfile.cpu
+++ b/docker/1.3-1/base/Dockerfile.cpu
@@ -8,9 +8,9 @@ ARG MINICONDA_VERSION=4.9.2
 ARG CONDA_PY_VERSION=39
 ARG CONDA_CHECKSUM="b4e46fcc8029e2cfa731b788f25b1d36"
 ARG CONDA_PKG_VERSION=4.10.1
-ARG PYTHON_VERSION=3.7.10
-ARG PYARROW_VERSION=1.0
-ARG MLIO_VERSION=0.7.0
+ARG PYTHON_VERSION=3.8.13
+ARG PYARROW_VERSION=14.0.1
+ARG MLIO_VERSION=0.9.0
 ARG XGBOOST_VERSION=1.3.3

 ENV DEBIAN_FRONTEND=noninteractive
@@ -44,6 +44,8 @@ RUN rm /etc/apt/sources.list.d/cuda.list && \
     # MLIO build dependencies
     # Official Ubuntu APT repositories do not contain an up-to-date version of CMake required to build MLIO.
     # Kitware contains the latest version of CMake.
+    wget http://es.archive.ubuntu.com/ubuntu/pool/main/libf/libffi/libffi7_3.3-4_amd64.deb && \
+    dpkg -i libffi7_3.3-4_amd64.deb && \
     apt-get -y install --no-install-recommends \
         apt-transport-https \
         ca-certificates \
@@ -93,6 +95,7 @@ RUN echo "conda ${CONDA_PKG_VERSION}" >> /miniconda3/conda-meta/pinned && \
     conda config --system --set show_channel_urls true && \
     echo "python ${PYTHON_VERSION}.*" >> /miniconda3/conda-meta/pinned && \
     conda install -c conda-forge python=${PYTHON_VERSION} && \
+    pip install requests==2.27.0 && \
     conda install conda=${CONDA_PKG_VERSION} && \
     conda update -y conda && \
     conda install -c conda-forge pyarrow=${PYARROW_VERSION} && \
@@ -113,7 +116,7 @@ RUN echo "conda ${CONDA_PKG_VERSION}" >> /miniconda3/conda-meta/pinned && \
     python3 -m pip install typing && \
     python3 -m pip install --upgrade pip && \
     python3 -m pip install dist/*.whl && \
-    cp -r /tmp/mlio/build/third-party/lib/intel64/gcc4.7/* /usr/local/lib/ && \
+    cp -r /tmp/mlio/build/third-party/lib/libtbb* /usr/local/lib/ && \
     ldconfig && \
     rm -rf /tmp/mlio
diff --git a/docker/1.3-1/final/Dockerfile.cpu b/docker/1.3-1/final/Dockerfile.cpu
index f4e87ea6..a6d534b2 100644
--- a/docker/1.3-1/final/Dockerfile.cpu
+++ b/docker/1.3-1/final/Dockerfile.cpu
@@ -1,5 +1,5 @@
 ARG SAGEMAKER_XGBOOST_VERSION=1.3-1
-ARG PYTHON_VERSION=3.7
+ARG PYTHON_VERSION=3.8

 FROM xgboost-container-base:${SAGEMAKER_XGBOOST_VERSION}-cpu-py3

@@ -19,7 +19,7 @@ RUN python3 -m pip install git+https://github.com/awslabs/sagemaker-debugger.git
 # Copy wheel to container #
 ###########################
 COPY dist/sagemaker_xgboost_container-2.0-py2.py3-none-any.whl /sagemaker_xgboost_container-1.0-py2.py3-none-any.whl
-RUN rm -rf /miniconda3/lib/python3.7/site-packages/numpy-1.21.6.dist-info && \
+RUN rm -rf /miniconda3/lib/python3.8/site-packages/numpy-1.21.6.dist-info && \
     python3 -m pip install --no-cache /sagemaker_xgboost_container-1.0-py2.py3-none-any.whl && \
     python3 -m pip uninstall -y typing && \
     rm /sagemaker_xgboost_container-1.0-py2.py3-none-any.whl
diff --git a/requirements.txt b/requirements.txt
index 2d454d80..f1790d5f 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,24 +3,25 @@ PyYAML==5.4.1
 Pillow==9.1.1
 boto3==1.17.52
 botocore==1.20.52
-cryptography==35.0.0
+cryptography==39.0.1
 gunicorn==19.10.0
 itsdangerous==2.0.1
 matplotlib==3.4.1
 multi-model-server==1.1.2
-numpy==1.21.6
+numpy==1.24.1
 pandas==1.2.4
 protobuf==3.20.1
 psutil==5.6.7 # sagemaker-containers requires psutil 5.6.7
 python-dateutil==2.8.1
-requests==2.25.1
 retrying==1.3.3
 sagemaker-containers==2.8.6.post2
 sagemaker-inference==1.5.5
 scikit-learn==0.24.1
-scipy==1.6.2
+scipy==1.8.0
 urllib3==1.26.5
 wheel==0.36.2
 jinja2==2.10.2
 MarkupSafe==1.1.1
 Werkzeug==0.15.6
+certifi==2023.7.22
+gevent==23.9.1
diff --git a/src/sagemaker_xgboost_container/algorithm_mode/serve_utils.py b/src/sagemaker_xgboost_container/algorithm_mode/serve_utils.py
index 61b708f1..8bbcdf84 100644
--- a/src/sagemaker_xgboost_container/algorithm_mode/serve_utils.py
+++ b/src/sagemaker_xgboost_container/algorithm_mode/serve_utils.py
@@ -91,8 +91,8 @@ def _get_sparse_matrix_from_libsvm(payload):
         data.append(val)

     row = np.array(row)
-    col = np.array(col).astype(np.int)
-    data = np.array(data).astype(np.float)
+    col = np.array(col).astype(int)
+    data = np.array(data).astype(float)
     if not (len(row) == len(col) and len(col) == len(data)):
         raise RuntimeError("Dimension checking failed when transforming sparse matrix.")

@@ -106,7 +106,7 @@ def parse_content_data(input_data, input_content_type):
     if content_type == CSV:
         try:
             decoded_payload = payload.strip().decode("utf-8")
-            dtest = encoder.csv_to_dmatrix(decoded_payload, dtype=np.float)
+            dtest = encoder.csv_to_dmatrix(decoded_payload, dtype=float)
         except Exception as e:
             raise RuntimeError("Loading csv data failed with Exception, "
                                "please ensure data is in csv format:\n {}\n {}".format(type(e), e))
diff --git a/tox.ini b/tox.ini
index 21ea2ebc..c2d19c34 100644
--- a/tox.ini
+++ b/tox.ini
@@ -17,9 +17,9 @@ deps =
     -r{toxinidir}/requirements.txt
     -r{toxinidir}/test-requirements.txt
 conda_deps=
-    pyarrow==1.0.1
+    pyarrow==14.0.1
     tbb==2020.2
-    mlio-py==0.7.0
+    mlio-py==0.9.0
 conda_channels=
     conda-forge
     mlio
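Reviewer note on the serve_utils.py changes: the requirements pin moves numpy to 1.24.1, and numpy 1.24 removes the long-deprecated np.int and np.float aliases, so the handlers switch to the builtin int and float types. Below is a minimal, self-contained sketch of that conversion pattern; the toy payload and the csr_matrix construction are illustrative only, not the container's actual libsvm parser.

import numpy as np
from scipy.sparse import csr_matrix

# Toy libsvm-style payload: "<label> <col>:<value> ...", one record per line.
payload = b"1 0:2.5 3:1.0\n0 1:0.5"

row, col, data = [], [], []
for row_idx, line in enumerate(payload.decode("utf-8").strip().split("\n")):
    for item in line.split()[1:]:  # skip the label field
        col_idx, value = item.split(":")
        row.append(row_idx)
        col.append(col_idx)
        data.append(value)

row = np.array(row)
# On numpy >= 1.24, .astype(np.int) / .astype(np.float) raise AttributeError;
# the builtin int / float dtypes are the supported replacements.
col = np.array(col).astype(int)
data = np.array(data).astype(float)

matrix = csr_matrix((data, (row, col)), shape=(row.max() + 1, col.max() + 1))
print(matrix.toarray())

The same alias removal is why parse_content_data now passes dtype=float to encoder.csv_to_dmatrix for the CSV path.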