Skip to content

Commit 68afec9

Browse files
committed
feat: pre-release of ext v1.4.1
1 parent ce1d28d commit 68afec9

File tree

3 files changed

+100
-94
lines changed

3 files changed

+100
-94
lines changed

vre-swan-cern/Dockerfile

Lines changed: 2 additions & 94 deletions
Original file line numberDiff line numberDiff line change
@@ -1,98 +1,6 @@
1-
# ARG VERSION_PARENT=v0.0.33
2-
# # FROM gitlab-registry.cern.ch/swan/docker-images/jupyter/swan:${VERSION_PARENT}
3-
# FROM ghcr.io/vre-hub/vre-swan:latest
1+
FROM gitlab-registry.cern.ch/swan/docker-images/jupyter/swan-cern:v0.0.58
42

5-
ARG VERSION_PARENT=v0.0.36
6-
7-
FROM gitlab-registry.cern.ch/swan/docker-images/jupyter/swan:${VERSION_PARENT}
8-
9-
LABEL maintainer="swan-admins@cern.ch"
10-
ARG NB_UID="1000"
11-
ARG BUILD_TAG=daily
12-
ENV VERSION_DOCKER_IMAGE=$BUILD_TAG
133
ENV RUCIO_JUPYTERLAB_VERSION="1.4.1"
14-
ENV RUCIO_LOG_LEVEL="info"
15-
16-
RUN echo "Building swan-cern image with tag ${VERSION_DOCKER_IMAGE} from parent tag ${VERSION_PARENT}."
17-
18-
# User session configuration scripts
19-
# Add scripts to be run before the jupyter server starts
20-
COPY scripts/before-notebook.d/* /usr/local/bin/before-notebook.d/
21-
COPY scripts/others/userconfig.sh /srv/singleuser/scripts
22-
23-
# dask-labextension needs to be installed first so its lab extension
24-
# gets disabled automatically when installing swandask
25-
RUN pip install --no-deps --no-cache-dir dask-labextension==7.0.0
264

27-
# Install all of our extensions required to access Spark, HDFS and Dask.
28-
# Ignore dependencies because they have already been installed or come from CVMFS
29-
RUN pip install --no-deps --no-cache-dir \
30-
sparkconnector==3.0.9 \
31-
sparkmonitor==3.1.0 \
32-
swanportallocator==2.0.0 \
33-
swandask==0.0.5
34-
35-
# CERN-VRE
36-
# RUN pip install --no-deps --no-cache-dir rucio-jupyterlab==${RUCIO_JUPYTERLAB_VERSION}
375
COPY ./wheels/rucio_jupyterlab-*.whl /tmp/
38-
RUN pip install --no-deps --no-cache-dir /tmp/rucio_jupyterlab-*.whl
39-
40-
# Install swandaskcluster in the lib directory that is exposed to notebooks and
41-
# terminals, which need it to do automatic TLS configuration for Dask clients
42-
RUN pip install --no-deps --no-cache-dir --target ${SWAN_LIB_DIR}/nb_term_lib \
43-
swandaskcluster==3.1.0
44-
45-
# Overwrite Python configuration, including section for the VRE
46-
# Add jupyter notebook configuration
47-
ADD python/jupyter_server_config.py /home/${NB_USER}/.jupyter/jupyter_server_config.py
48-
49-
# Add helper scripts
50-
COPY scripts/others/* /srv/singleuser/
51-
52-
# Add dask configuration file
53-
# Dask config: lab extension must use SwanHTCondorCluster
54-
ADD config/dask-labextension.yaml /etc/dask/labextension.yaml
55-
56-
USER root
57-
58-
# Install Spark extensions for classic UI
59-
RUN jupyter nbclassic-extension install --py --system sparkconnector && \
60-
jupyter nbclassic-extension install --py --system sparkmonitor
61-
62-
# HTCondor requirements
63-
RUN dnf install -y \
64-
# required by condor_submit
65-
perl-Archive-Tar \
66-
perl-Authen-Krb5 \
67-
perl-Sys-Hostname \
68-
perl-Sys-Syslog && \
69-
dnf clean all && \
70-
rm -rf /var/cache/dnf
71-
# Required for kerberos authentication to work
72-
ADD config/krb5.conf.no_rdns /etc/krb5.conf.no_rdns
73-
74-
# Import dependency of swandaskcluster, which is required by Dask clients
75-
# created from a notebook/terminal to create a Security object
76-
RUN ln -s $(pip show swanportallocator | grep -oP 'Location: \K.*')/swanportallocator ${SWAN_LIB_DIR}/nb_term_lib
77-
78-
# Create symlinks for the remaining swan extensions, because
79-
# they need to be accessible in the user environment
80-
RUN ln -s $(pip show sparkconnector | grep -oP 'Location: \K.*')/sparkconnector ${SWAN_LIB_DIR}/extensions/ && \
81-
ln -s $(pip show sparkmonitor | grep -oP 'Location: \K.*')/sparkmonitor ${SWAN_LIB_DIR}/extensions/ && \
82-
ln -s $(pip show swandask | grep -oP 'Location: \K.*')/swandask ${SWAN_LIB_DIR}/extensions/ && \
83-
ln -s $(pip show dask-labextension | grep -oP 'Location: \K.*')/dask_labextension ${SWAN_LIB_DIR}/extensions/ && \
84-
# dependency of dask-labextension
85-
ln -s $(pip show jupyter-server-proxy | grep -oP 'Location: \K.*')/jupyter_server_proxy ${SWAN_LIB_DIR}/extensions/ && \
86-
# dependency of jupyter-server-proxy
87-
ln -s $(pip show simpervisor | grep -oP 'Location: \K.*')/simpervisor ${SWAN_LIB_DIR}/extensions/
88-
89-
# CERN-VRE
90-
# The fact that the rucio_jupyterlab does not correspond with rucio-jupyterlab is how the package was developed
91-
RUN ln -s $(pip show rucio-jupyterlab | grep -oP 'Location: \K.*')/rucio_jupyterlab ${SWAN_LIB_DIR}/extensions/
92-
# RUN jupyter serverextension enable --py rucio_jupyterlab --sys-prefix
93-
94-
# Grant scripts execution permissions
95-
RUN chmod +x /usr/local/bin/before-notebook.d/*
96-
97-
# Switch back to jovyan to avoid accidental container runs as root
98-
USER ${NB_UID}
6+
RUN pip install --no-deps --no-cache-dir /tmp/rucio_jupyterlab-*.whl

vre-swan-cern/Dockerfile.old

Lines changed: 98 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,98 @@
1+
# ARG VERSION_PARENT=v0.0.33
2+
# # FROM gitlab-registry.cern.ch/swan/docker-images/jupyter/swan:${VERSION_PARENT}
3+
# FROM ghcr.io/vre-hub/vre-swan:latest
4+
5+
ARG VERSION_PARENT=v0.0.36
6+
7+
FROM gitlab-registry.cern.ch/swan/docker-images/jupyter/swan:${VERSION_PARENT}
8+
9+
LABEL maintainer="swan-admins@cern.ch"
10+
ARG NB_UID="1000"
11+
ARG BUILD_TAG=daily
12+
ENV VERSION_DOCKER_IMAGE=$BUILD_TAG
13+
ENV RUCIO_JUPYTERLAB_VERSION="1.4.1"
14+
ENV RUCIO_LOG_LEVEL="info"
15+
16+
RUN echo "Building swan-cern image with tag ${VERSION_DOCKER_IMAGE} from parent tag ${VERSION_PARENT}."
17+
18+
# User session configuration scripts
19+
# Add scripts to be run before the jupyter server starts
20+
COPY scripts/before-notebook.d/* /usr/local/bin/before-notebook.d/
21+
COPY scripts/others/userconfig.sh /srv/singleuser/scripts
22+
23+
# dask-labextension needs to be installed first so its lab extension
24+
# gets disabled automatically when installing swandask
25+
RUN pip install --no-deps --no-cache-dir dask-labextension==7.0.0
26+
27+
# Install all of our extensions required to access Spark, HDFS and Dask.
28+
# Ignore dependencies because they have already been installed or come from CVMFS
29+
RUN pip install --no-deps --no-cache-dir \
30+
sparkconnector==3.0.9 \
31+
sparkmonitor==3.1.0 \
32+
swanportallocator==2.0.0 \
33+
swandask==0.0.5
34+
35+
# CERN-VRE
36+
# RUN pip install --no-deps --no-cache-dir rucio-jupyterlab==${RUCIO_JUPYTERLAB_VERSION}
37+
COPY ./wheels/rucio_jupyterlab-*.whl /tmp/
38+
RUN pip install --no-deps --no-cache-dir /tmp/rucio_jupyterlab-*.whl
39+
40+
# Install swandaskcluster in the lib directory that is exposed to notebooks and
41+
# terminals, which need it to do automatic TLS configuration for Dask clients
42+
RUN pip install --no-deps --no-cache-dir --target ${SWAN_LIB_DIR}/nb_term_lib \
43+
swandaskcluster==3.1.0
44+
45+
# Overwrite Python configuration, including section for the VRE
46+
# Add jupyter notebook configuration
47+
ADD python/jupyter_server_config.py /home/${NB_USER}/.jupyter/jupyter_server_config.py
48+
49+
# Add helper scripts
50+
COPY scripts/others/* /srv/singleuser/
51+
52+
# Add dask configuration file
53+
# Dask config: lab extension must use SwanHTCondorCluster
54+
ADD config/dask-labextension.yaml /etc/dask/labextension.yaml
55+
56+
USER root
57+
58+
# Install Spark extensions for classic UI
59+
RUN jupyter nbclassic-extension install --py --system sparkconnector && \
60+
jupyter nbclassic-extension install --py --system sparkmonitor
61+
62+
# HTCondor requirements
63+
RUN dnf install -y \
64+
# required by condor_submit
65+
perl-Archive-Tar \
66+
perl-Authen-Krb5 \
67+
perl-Sys-Hostname \
68+
perl-Sys-Syslog && \
69+
dnf clean all && \
70+
rm -rf /var/cache/dnf
71+
# Required for kerberos authentication to work
72+
ADD config/krb5.conf.no_rdns /etc/krb5.conf.no_rdns
73+
74+
# Import dependency of swandaskcluster, which is required by Dask clients
75+
# created from a notebook/terminal to create a Security object
76+
RUN ln -s $(pip show swanportallocator | grep -oP 'Location: \K.*')/swanportallocator ${SWAN_LIB_DIR}/nb_term_lib
77+
78+
# Create symlinks for the remaining swan extensions, because
79+
# they need to be accessible in the user environment
80+
RUN ln -s $(pip show sparkconnector | grep -oP 'Location: \K.*')/sparkconnector ${SWAN_LIB_DIR}/extensions/ && \
81+
ln -s $(pip show sparkmonitor | grep -oP 'Location: \K.*')/sparkmonitor ${SWAN_LIB_DIR}/extensions/ && \
82+
ln -s $(pip show swandask | grep -oP 'Location: \K.*')/swandask ${SWAN_LIB_DIR}/extensions/ && \
83+
ln -s $(pip show dask-labextension | grep -oP 'Location: \K.*')/dask_labextension ${SWAN_LIB_DIR}/extensions/ && \
84+
# dependency of dask-labextension
85+
ln -s $(pip show jupyter-server-proxy | grep -oP 'Location: \K.*')/jupyter_server_proxy ${SWAN_LIB_DIR}/extensions/ && \
86+
# dependency of jupyter-server-proxy
87+
ln -s $(pip show simpervisor | grep -oP 'Location: \K.*')/simpervisor ${SWAN_LIB_DIR}/extensions/
88+
89+
# CERN-VRE
90+
# The fact that the rucio_jupyterlab does not correspond with rucio-jupyterlab is how the package was developed
91+
RUN ln -s $(pip show rucio-jupyterlab | grep -oP 'Location: \K.*')/rucio_jupyterlab ${SWAN_LIB_DIR}/extensions/
92+
# RUN jupyter serverextension enable --py rucio_jupyterlab --sys-prefix
93+
94+
# Grant scripts execution permissions
95+
RUN chmod +x /usr/local/bin/before-notebook.d/*
96+
97+
# Switch back to jovyan to avoid accidental container runs as root
98+
USER ${NB_UID}
5.14 KB
Binary file not shown.

0 commit comments

Comments
 (0)