Skip to content

Commit 0ed02eb

Browse files
chore: Generate build artifacts for 3.4.3 release
1 parent ed303e2 commit 0ed02eb

File tree

79 files changed

+3409
-0
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

79 files changed

+3409
-0
lines changed
Lines changed: 224 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,224 @@
1+
# Base image tag is injected at build time so the same Dockerfile can track
# multiple micromamba releases.
ARG TAG_FOR_BASE_MICROMAMBA_IMAGE
FROM mambaorg/micromamba:$TAG_FOR_BASE_MICROMAMBA_IMAGE

# Build-time knobs: CUDA version for conda solver overrides, and the names of
# the environment spec files COPY'd in below.
ARG CUDA_MAJOR_MINOR_VERSION=''
ARG ENV_IN_FILENAME
ARG PINNED_ENV_IN_FILENAME
ARG ARG_BASED_ENV_IN_FILENAME
ARG IMAGE_VERSION
LABEL "org.amazon.sagemaker-distribution.image.version"=$IMAGE_VERSION

ARG AMZN_BASE="/opt/amazon/sagemaker"
ARG DB_ROOT_DIR="/opt/db"
ARG DIRECTORY_TREE_STAGE_DIR="${AMZN_BASE}/dir-staging"

ARG NB_USER="sagemaker-user"
ARG NB_UID=1000
ARG NB_GID=100

# https://www.openssl.org/source/
ARG FIPS_VALIDATED_SSL=3.0.8
ARG MIN_REQUIRED_MICROMAMBA_VERSION=1.5.11

ENV SAGEMAKER_LOGGING_DIR="/var/log/sagemaker/"
ENV STUDIO_LOGGING_DIR="/var/log/studio/"
ENV EDITOR="nano"
ENV IMAGE_VERSION=$IMAGE_VERSION
ENV PINNED_MICROMAMBA_MINOR_VERSION="1.5.*"
ENV SAGEMAKER_RECOVERY_MODE_HOME=/tmp/sagemaker-recovery-mode-home

USER root
# Upgrade micromamba to the latest patch version in the pinned minor version range, if applicable.
# Note: $PINNED_MICROMAMBA_MINOR_VERSION is deliberately unquoted on the right
# side of `==` so bash performs glob matching against the "1.5.*" pattern.
RUN CURRENT_MICROMAMBA_VERSION=$(micromamba --version) && \
    echo "Current micromamba version: $CURRENT_MICROMAMBA_VERSION" && \
    if [[ "$CURRENT_MICROMAMBA_VERSION" == $PINNED_MICROMAMBA_MINOR_VERSION ]]; then \
        echo "Upgrading micromamba to the latest $PINNED_MICROMAMBA_MINOR_VERSION version..." && \
        micromamba self-update -c conda-forge --version "$MIN_REQUIRED_MICROMAMBA_VERSION" && \
        micromamba clean --all --yes --force-pkgs-dirs; \
    else \
        echo "Micromamba is already at version $CURRENT_MICROMAMBA_VERSION (outside $PINNED_MICROMAMBA_MINOR_VERSION). No upgrade performed."; \
    fi

# Rename the base image's mamba user/group to the SageMaker conventions
# (sagemaker-user, uid 1000, gid 100) and move its home directory.
RUN usermod "--login=${NB_USER}" "--home=/home/${NB_USER}" --move-home "-u ${NB_UID}" "${MAMBA_USER}" && \
    groupmod "--new-name=${NB_USER}" --non-unique "-g ${NB_GID}" "${MAMBA_USER}" && \
    # Update the expected value of MAMBA_USER for the
    # _entrypoint.sh consistency check.
    echo "${NB_USER}" > "/etc/arg_mamba_user" && \
    :
ENV MAMBA_USER=$NB_USER
ENV USER=$NB_USER

# NOTE(review): blanket `apt-get upgrade -y` is flagged by hadolint DL3005;
# the usual alternative is bumping the base-image tag/digest — confirm this is intentional.
RUN apt-get update && apt-get upgrade -y && \
    apt-get install -y --no-install-recommends sudo gettext-base wget curl unzip git rsync build-essential openssh-client nano cron less mandoc jq ca-certificates gnupg && \
    # We just install tzdata below but leave default time zone as UTC. This helps packages like Pandas to function correctly.
    DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends tzdata krb5-user libkrb5-dev libsasl2-dev libsasl2-modules && \
    chmod g+w /etc/passwd && \
    # NOTE(review): passwordless sudo for ALL users is a deliberate convenience
    # for notebook users, but it makes USER a soft boundary only.
    echo "ALL ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers && \
    touch /etc/krb5.conf.lock && chown ${NB_USER}:${MAMBA_USER} /etc/krb5.conf* && \
    # Note that we do NOT run `rm -rf /var/lib/apt/lists/*` here. If we did, anyone building on top of our images will
    # not be able to run any `apt-get install` commands and that would hamper customizability of the images.
    curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" && \
    unzip awscliv2.zip && \
    sudo ./aws/install && \
    rm -rf aws awscliv2.zip && \
    : && \
    # Install Q CLI
    curl --proto '=https' --tlsv1.2 -sSf "https://desktop-release.q.us-east-1.amazonaws.com/1.12.7/q-x86_64-linux.zip" -o "q.zip" && \
    unzip q.zip && \
    Q_INSTALL_GLOBAL=true ./q/install.sh --no-confirm && \
    rm -rf q q.zip && \
    : && \
    # Ensure login shells activate the current conda env.
    echo "source /usr/local/bin/_activate_current_env.sh" | tee --append /etc/profile && \
    # CodeEditor - create server, user data dirs
    mkdir -p /opt/amazon/sagemaker/sagemaker-code-editor-server-data /opt/amazon/sagemaker/sagemaker-code-editor-user-data \
    && chown $MAMBA_USER:$MAMBA_USER /opt/amazon/sagemaker/sagemaker-code-editor-server-data /opt/amazon/sagemaker/sagemaker-code-editor-user-data && \
    # create dir to store user data files
    mkdir -p /opt/amazon/sagemaker/user-data \
    && chown $MAMBA_USER:$MAMBA_USER /opt/amazon/sagemaker/user-data && \
    # Merge in OS directory tree contents.
    mkdir -p ${DIRECTORY_TREE_STAGE_DIR}
COPY dirs/ ${DIRECTORY_TREE_STAGE_DIR}/
RUN rsync -a ${DIRECTORY_TREE_STAGE_DIR}/ / && \
    rm -rf ${DIRECTORY_TREE_STAGE_DIR} && \
    # CodeEditor - download the extensions listed one-URL-per-line in extensions.txt
    # (the `|| [ -n "$url" ]` keeps a final line without a trailing newline).
    mkdir -p /etc/code-editor/extensions && \
    while IFS= read -r url || [ -n "$url" ]; do \
        echo "Downloading extension from ${url}..." && \
        # NOTE(review): --no-check-certificate disables TLS verification for
        # these extension downloads — confirm this is acceptable, or pin checksums.
        wget --no-check-certificate -P /etc/code-editor/extensions "${url}"; \
    done < /etc/code-editor/extensions.txt

USER $MAMBA_USER
COPY --chown=$MAMBA_USER:$MAMBA_USER $ENV_IN_FILENAME *.in /tmp/
COPY --chown=$MAMBA_USER:$MAMBA_USER $PINNED_ENV_IN_FILENAME *.in /tmp/

# Activate the conda env during subsequent RUN steps; CONDA_OVERRIDE_CUDA lets
# the solver assume the given CUDA version even though the builder has no GPU.
ARG MAMBA_DOCKERFILE_ACTIVATE=1
ARG CONDA_OVERRIDE_CUDA=$CUDA_MAJOR_MINOR_VERSION

# Make sure that $ENV_IN_FILENAME and $PINNED_ENV_IN_FILENAME has a newline at the end before the `tee` command runs.
# Otherwise, nasty things will happen.
RUN if [[ -z $ARG_BASED_ENV_IN_FILENAME ]] ; \
    then echo 'No ARG_BASED_ENV_IN_FILENAME passed' ; \
    else envsubst < /tmp/$ARG_BASED_ENV_IN_FILENAME | tee --append /tmp/$ENV_IN_FILENAME ; \
    fi && \
    # Enforce dependencies are all installed from conda-forge
    micromamba install -y --name base --file /tmp/$ENV_IN_FILENAME --file /tmp/$PINNED_ENV_IN_FILENAME && \
    mkdir -p $SAGEMAKER_RECOVERY_MODE_HOME && \
    chown $MAMBA_USER:$MAMBA_USER $SAGEMAKER_RECOVERY_MODE_HOME && \
    # Extract the exact pinned specs for the recovery-mode env from the main env file.
    SUPERVISOR_VERSION=$(grep "^conda-forge::supervisor\[" /tmp/$ENV_IN_FILENAME) && \
    JUPYTERLAB_VERSION=$(grep "^conda-forge::jupyterlab\[" /tmp/$ENV_IN_FILENAME) && \
    SAGEMAKER_JUPYTERLAB_VERSION=$(grep "^conda-forge::sagemaker-jupyterlab-extension" /tmp/$ENV_IN_FILENAME) && \
    echo "Installing in sagemaker-recovery-mode micromamba environment: $JUPYTERLAB_VERSION $SAGEMAKER_JUPYTERLAB_VERSION" && \
    micromamba create --prefix /opt/conda/envs/sagemaker-recovery-mode && \
    micromamba install --prefix /opt/conda/envs/sagemaker-recovery-mode -y $JUPYTERLAB_VERSION $SAGEMAKER_JUPYTERLAB_VERSION $SUPERVISOR_VERSION && \
    micromamba clean --all --yes --force-pkgs-dirs && \
    rm -rf /tmp/*.in && \
    sudo ln -s $(which python3) /usr/bin/python && \
    # Update npm version
    npm i -g npm && \
    # Enforce to use `conda-forge` as only channel, by removing `defaults`
    conda config --remove channels defaults && \
    micromamba config append channels conda-forge --env && \
    # Configure CodeEditor - Install extensions and set preferences
    extensionloc=/opt/amazon/sagemaker/sagemaker-code-editor-server-data/extensions && mkdir -p "${extensionloc}" \
    # Loop through all vsix files in /etc/code-editor/extensions and install them
    && for ext in /etc/code-editor/extensions/*.vsix; do \
        echo "Installing extension ${ext}..."; \
        sagemaker-code-editor --install-extension "${ext}" --extensions-dir "${extensionloc}" --server-data-dir /opt/amazon/sagemaker/sagemaker-code-editor-server-data --user-data-dir /opt/amazon/sagemaker/sagemaker-code-editor-user-data; \
    done \
    # Copy the settings
    && cp /etc/code-editor/code_editor_machine_settings.json /opt/amazon/sagemaker/sagemaker-code-editor-server-data/data/Machine/settings.json && \
    cp /etc/code-editor/code_editor_user_settings.json /opt/amazon/sagemaker/sagemaker-code-editor-server-data/data/User/settings.json && \
    # Install glue kernels, and move to shared directory
    # Also patching base kernel so Studio background code doesn't start session silently
    install-glue-kernels && \
    SITE_PACKAGES=$(pip show aws-glue-sessions | grep Location | awk '{print $2}') && \
    jupyter-kernelspec install $SITE_PACKAGES/aws_glue_interactive_sessions_kernel/glue_pyspark --user && \
    jupyter-kernelspec install $SITE_PACKAGES/aws_glue_interactive_sessions_kernel/glue_spark --user && \
    mv /home/sagemaker-user/.local/share/jupyter/kernels/glue_pyspark /opt/conda/share/jupyter/kernels && \
    mv /home/sagemaker-user/.local/share/jupyter/kernels/glue_spark /opt/conda/share/jupyter/kernels && \
    sed -i '/if not store_history and (/i\ if "sm_analytics_runtime_check" in code:\n return await self._complete_cell()\n' \
    "$SITE_PACKAGES/aws_glue_interactive_sessions_kernel/glue_kernel_base/BaseKernel.py" && \
    # Install FIPS Provider for OpenSSL, on top of existing OpenSSL installation
    # v3.0.8 is latest FIPS validated provider, so this is the one we install
    # But we need to run tests against the installed version.
    # see https://github.com/openssl/openssl/blob/master/README-FIPS.md https://www.openssl.org/source/
    INSTALLED_SSL=$(micromamba list | grep openssl | tr -s ' ' | cut -d ' ' -f 3 | head -n 1) && \
    # download source code for installed, and FIPS validated openssl versions
    curl -L https://github.com/openssl/openssl/releases/download/openssl-$FIPS_VALIDATED_SSL/openssl-$FIPS_VALIDATED_SSL.tar.gz > openssl-$FIPS_VALIDATED_SSL.tar.gz && \
    curl -L https://github.com/openssl/openssl/releases/download/openssl-$INSTALLED_SSL/openssl-$INSTALLED_SSL.tar.gz > openssl-$INSTALLED_SSL.tar.gz && \
    tar -xf openssl-$FIPS_VALIDATED_SSL.tar.gz && tar -xf openssl-$INSTALLED_SSL.tar.gz && cd openssl-$FIPS_VALIDATED_SSL && \
    # Configure both versions to enable FIPS and build
    ./Configure enable-fips --prefix=/opt/conda --openssldir=/opt/conda/ssl && make && \
    cd ../openssl-$INSTALLED_SSL && \
    ./Configure enable-fips --prefix=/opt/conda --openssldir=/opt/conda/ssl && make && \
    # Copy validated provider to installed version for testing
    cp ../openssl-$FIPS_VALIDATED_SSL/providers/fips.so providers/. && \
    cp ../openssl-$FIPS_VALIDATED_SSL/providers/fipsmodule.cnf providers/. && \
    make tests && cd ../openssl-$FIPS_VALIDATED_SSL && \
    # After tests pass, install FIPS provider and remove source code
    make install_fips && cd .. && rm -rf ./openssl-* && \
    # Create new config file with fips-enabled. Then user can override OPENSSL_CONF to enable FIPS
    # e.g. export OPENSSL_CONF=/opt/conda/ssl/openssl-fips.cnf
    cp /opt/conda/ssl/openssl.cnf /opt/conda/ssl/openssl-fips.cnf && \
    sed -i "s:# .include fipsmodule.cnf:.include /opt/conda/ssl/fipsmodule.cnf:" /opt/conda/ssl/openssl-fips.cnf && \
    sed -i 's:# fips = fips_sect:fips = fips_sect:' /opt/conda/ssl/openssl-fips.cnf && \
    # Install Kerberos.
    # Make sure no dependency is added/updated
    pip install "krb5>=0.5.1,<0.6" && \
    # Fails the build if `pip show krb5` reports any requirements (i.e. krb5
    # pulled in a dependency), keeping the env exactly as solved by micromamba.
    pip show krb5 | grep Require | xargs -i sh -c '[ $(echo {} | cut -d: -f2 | wc -w) -eq 0 ] ' && \
    # https://stackoverflow.com/questions/122327
    SYSTEM_PYTHON_PATH=$(python3 -c "from __future__ import print_function;import sysconfig; print(sysconfig.get_paths().get('purelib'))") && \
    # Remove SparkRKernel as it's not supported \
    jupyter-kernelspec remove -f -y sparkrkernel && \
    # Patch Sparkmagic lib to support Custom Certificates \
    # https://github.com/jupyter-incubator/sparkmagic/pull/435/files \
    cp -a ${SYSTEM_PYTHON_PATH}/sagemaker_studio_analytics_extension/patches/configuration.py ${SYSTEM_PYTHON_PATH}/sparkmagic/utils/ && \
    cp -a ${SYSTEM_PYTHON_PATH}/sagemaker_studio_analytics_extension/patches/reliablehttpclient.py ${SYSTEM_PYTHON_PATH}/sparkmagic/livyclientlib/reliablehttpclient.py && \
    # Point the Spark kernelspecs at the conda python and rename their display names.
    sed -i 's= "python"= "/opt/conda/bin/python"=g' /opt/conda/share/jupyter/kernels/pysparkkernel/kernel.json /opt/conda/share/jupyter/kernels/sparkkernel/kernel.json && \
    sed -i 's="Spark"="SparkMagic Spark"=g' /opt/conda/share/jupyter/kernels/sparkkernel/kernel.json && \
    sed -i 's="PySpark"="SparkMagic PySpark"=g' /opt/conda/share/jupyter/kernels/pysparkkernel/kernel.json && \
    # Configure RTC - disable jupyter_collaboration by default
    jupyter labextension disable @jupyter/collaboration-extension && \
    # Disable docprovider-extension for v3 and above images
    jupyter labextension disable @jupyter/docprovider-extension

# Patch glue kernels to use kernel wrapper
COPY patch_glue_pyspark.json /opt/conda/share/jupyter/kernels/glue_pyspark/kernel.json
COPY patch_glue_spark.json /opt/conda/share/jupyter/kernels/glue_spark/kernel.json

USER root

# Create logging directories for supervisor
RUN mkdir -p $SAGEMAKER_LOGGING_DIR && \
    chmod a+rw $SAGEMAKER_LOGGING_DIR && \
    mkdir -p ${STUDIO_LOGGING_DIR} && \
    chown ${NB_USER}:${MAMBA_USER} ${STUDIO_LOGGING_DIR} && \
    # Create sagemaker pysdk admin default config directory
    mkdir -p /etc/xdg/sagemaker && \
    chmod a+rw /etc/xdg/sagemaker && \
    # Clean up CodeEditor artifacts
    rm -rf /etc/code-editor && \
    # Create supervisord runtime directory
    mkdir -p /var/run/supervisord && \
    chmod a+rw /var/run/supervisord && \
    # Create root directory for DB
    mkdir -p $DB_ROOT_DIR && \
    chmod a+rw $DB_ROOT_DIR && \
    # Generate OSS license/compliance artifacts into the user's home directory.
    HOME_DIR="/home/${NB_USER}/licenses" \
    && mkdir -p ${HOME_DIR} \
    && curl -o ${HOME_DIR}/oss_compliance.zip https://aws-dlinfra-utilities.s3.amazonaws.com/oss_compliance.zip \
    && unzip ${HOME_DIR}/oss_compliance.zip -d ${HOME_DIR}/ \
    && cp ${HOME_DIR}/oss_compliance/test/testOSSCompliance /usr/local/bin/testOSSCompliance \
    && chmod +x /usr/local/bin/testOSSCompliance \
    && chmod +x ${HOME_DIR}/oss_compliance/generate_oss_compliance.sh \
    && ${HOME_DIR}/oss_compliance/generate_oss_compliance.sh ${HOME_DIR} python \
    && rm -rf ${HOME_DIR}/oss_compliance*

# Explicitly disable BuildKit for SM Studio Docker functionality
ENV DOCKER_BUILDKIT=0
ENV PATH="/etc/sagemaker-inference-server:/opt/conda/bin:/opt/conda/condabin:$PATH"
WORKDIR "/home/${NB_USER}"
ENV SHELL=/bin/bash
ENV OPENSSL_MODULES=/opt/conda/lib64/ossl-modules/
# Final image runs as the non-root notebook user.
USER $MAMBA_USER
Lines changed: 68 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,68 @@
1+
# This file is auto-generated.
2+
conda-forge::mcp
3+
conda-forge::uv
4+
conda-forge::s3fs
5+
conda-forge::seaborn
6+
conda-forge::jupyter-activity-monitor-extension
7+
conda-forge::mlflow
8+
conda-forge::sagemaker-mlflow
9+
conda-forge::langchain-aws
10+
conda-forge::jupyter-collaboration
11+
conda-forge::sagemaker-code-editor
12+
conda-forge::amazon_sagemaker_sql_editor
13+
conda-forge::amazon-sagemaker-sql-magic
14+
conda-forge::amazon-sagemaker-jupyter-ai-q-developer
15+
conda-forge::amazon-q-developer-jupyterlab-ext
16+
conda-forge::langchain
17+
conda-forge::fastapi
18+
conda-forge::uvicorn
19+
conda-forge::pytorch
20+
conda-forge::tensorflow
21+
conda-forge::python
22+
conda-forge::pip
23+
conda-forge::torchvision
24+
conda-forge::numpy
25+
conda-forge::pandas
26+
conda-forge::scikit-learn
27+
conda-forge::jinja2
28+
conda-forge::matplotlib-base
29+
conda-forge::sagemaker-headless-execution-driver
30+
conda-forge::ipython
31+
conda-forge::scipy
32+
conda-forge::keras
33+
conda-forge::py-xgboost-cpu
34+
conda-forge::jupyterlab
35+
conda-forge::ipywidgets
36+
conda-forge::conda
37+
conda-forge::boto3
38+
conda-forge::sagemaker-python-sdk
39+
conda-forge::supervisor
40+
conda-forge::autogluon
41+
conda-forge::aws-glue-sessions
42+
conda-forge::sagemaker-kernel-wrapper
43+
conda-forge::jupyter-ai
44+
conda-forge::jupyter-scheduler
45+
conda-forge::jupyter-lsp
46+
conda-forge::jupyterlab-lsp
47+
conda-forge::python-lsp-server
48+
conda-forge::jupyterlab-git
49+
conda-forge::notebook
50+
conda-forge::altair
51+
conda-forge::sagemaker-studio-analytics-extension
52+
conda-forge::jupyter-dash
53+
conda-forge::sagemaker-jupyterlab-extension
54+
conda-forge::sagemaker-jupyterlab-emr-extension
55+
conda-forge::amazon-sagemaker-jupyter-scheduler
56+
conda-forge::jupyter-server-proxy
57+
conda-forge::pyhive
58+
conda-forge::python-gssapi
59+
conda-forge::tf-keras
60+
conda-forge::git-remote-codecommit
61+
conda-forge::docker-cli
62+
conda-forge::aioboto3
63+
conda-forge::sagemaker-studio-cli
64+
conda-forge::sagemaker-studio
65+
conda-forge::sagemaker-studio-dataengineering-sessions
66+
conda-forge::sagemaker-studio-dataengineering-extensions
67+
conda-forge::amzn-sagemaker-aiops-jupyterlab-extension
68+
conda-forge::aws-s3-access-grants-boto3-plugin
Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
conda-forge::dash[version='<=2.18.1']
2+
conda-forge::evaluate[version='<0.4.2']
3+
conda-forge::catboost[version='>=1.1.1,<1.3.0',build='*cpu*']
4+
conda-forge::libsqlite[version='<3.49.0']
5+
conda-forge::urllib3[version='<2']
6+
conda-forge::papermill[version='>=2.6.0']
7+
conda-forge::plotly[version='<6.1.0']
Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
1+
{
2+
"python.terminal.activateEnvironment": false,
3+
"python.defaultInterpreterPath": "/opt/conda/bin/python"
4+
}
Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
{
2+
"extensions.autoUpdate": false
3+
}
Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
1+
https://open-vsx.org/api/ms-toolsai/jupyter/2024.5.0/file/ms-toolsai.jupyter-2024.5.0.vsix
2+
https://open-vsx.org/api/ms-python/python/2023.20.0/file/ms-python.python-2023.20.0.vsix
3+
https://open-vsx.org/api/amazonwebservices/aws-toolkit-vscode/3.69.0/file/amazonwebservices.aws-toolkit-vscode-3.69.0.vsix
4+
https://open-vsx.org/api/amazonwebservices/amazon-q-vscode/1.58.0/file/amazonwebservices.amazon-q-vscode-1.58.0.vsix
Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
# Conda directory configuration: prefer per-user locations under the home
# directory, falling back to the shared /opt/conda tree.
envs_dirs:
  - ~/.conda/envs
  - /opt/conda/envs
pkgs_dirs:
  - ~/.conda/pkgs
  - /opt/conda/pkgs
Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,28 @@
1+
# Default Jupyter server config
# Note: those config can be overridden by user-level configs.

# Use bash as the shell for in-browser terminals, and compress HTTP responses.
c.ServerApp.terminado_settings = {"shell_command": ["/bin/bash"]}
c.ServerApp.tornado_settings = {"compress_response": True}

# Do not delete files to trash. Instead, permanently delete files.
c.FileContentsManager.delete_to_trash = False

# Allow deleting non-empty directory via file browser. Related documentation:
# https://github.com/jupyter-server/jupyter_server/blob/main/jupyter_server/services/contents/filemanager.py#L125-L129
c.FileContentsManager.always_delete_dir = True

# Enable `allow_hidden` by default, so hidden files are accessible via Jupyter server
# Related documentation: https://jupyterlab.readthedocs.io/en/stable/user/files.html#displaying-hidden-files
c.ContentsManager.allow_hidden = True

# This will set the LanguageServerManager.extra_node_roots setting if amazon_sagemaker_sql_editor exists in the
# environment. Ignore otherwise, don't fail the JL server start
# Related documentation: https://jupyterlab-lsp.readthedocs.io/en/v3.4.0/Configuring.html
try:
    import os

    module = __import__("amazon_sagemaker_sql_editor")
    module_location = os.path.dirname(module.__file__)
    c.LanguageServerManager.extra_node_roots = [f"{module_location}/sql-language-server"]
except Exception:
    # Catch Exception rather than a bare `except:` so SystemExit and
    # KeyboardInterrupt still propagate; any import/lookup failure simply
    # leaves the LSP config at its default (best-effort by design).
    pass
Lines changed: 38 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,38 @@
1+
#!/bin/bash
# Apply the *.patch files for a given distribution flavor ("smus" or
# "studio-ai") to the conda tree under /opt/conda.

set -eux

# Check if parameter is provided
if [ $# -ne 1 ]; then
    echo "Usage: $0 [smus|studio-ai]"
    exit 1
fi

# Validate parameter and select the patch directory for the flavor.
case "$1" in
    "smus")
        # SMUS first runs an extra rewrite script before its patches.
        bash "/etc/patches/smus-script/replace-job-with-schedule.sh"
        PATCH_DIR="/etc/patches/smus"
        ;;
    "studio-ai")
        PATCH_DIR="/etc/patches/studio-ai"
        ;;
    *)
        echo "Error: Parameter must be either 'smus' or 'studio-ai'"
        exit 1
        ;;
esac

# Check if patch directory exists
if [ ! -d "$PATCH_DIR" ]; then
    echo "Error: Patch directory $PATCH_DIR does not exist"
    exit 1
fi

# Patch files can be generated via "diff -u /path/to/original_file /path/to/new_file > XXX_bad_package.patch"
# See https://www.thegeekstuff.com/2014/12/patch-command-examples/
for PATCHFILE in "$PATCH_DIR"/*.patch; do
    # Skip the unexpanded glob when the directory contains no .patch files.
    [ -f "$PATCHFILE" ] || continue
    echo "Applying $PATCHFILE"
    # --strip=3 removes the first three leading path components from file
    # names in the diff headers so they resolve relative to /opt/conda;
    # the subshell keeps the cd from affecting the rest of the script.
    (cd "/opt/conda" && patch --strip=3 < "$PATCHFILE")
done

0 commit comments

Comments
 (0)