From c080aff78d9577e0f578d5aee7350ac25a213907 Mon Sep 17 00:00:00 2001
From: Lukas Masuch
Date: Wed, 29 Jan 2020 21:02:57 +0100
Subject: [PATCH 001/293] Remove unneeded fix
---
resources/tools/r-runtime.sh | 2 --
1 file changed, 2 deletions(-)
diff --git a/resources/tools/r-runtime.sh b/resources/tools/r-runtime.sh
index 60c0ddff..f2ea36df 100644
--- a/resources/tools/r-runtime.sh
+++ b/resources/tools/r-runtime.sh
@@ -23,8 +23,6 @@ if ! hash Rscript 2>/dev/null; then
conda install -y -c r "r-base==3.6.*" r-reticulate rpy2 r-rodbc unixodbc cyrus-sasl r-essentials r-cairo
# Install irkernel - needs to be installed from conda forge -> otherwise downgrades package
conda install -y -c conda-forge r-irkernel
- # Upgrade pyzmp to newest version -> gets downgraded for whatever reason...
- conda update -y pyzmq
else
echo "R runtime is already installed"
fi
From 94a503894c7427693f93c50630bfa047a5b638ed Mon Sep 17 00:00:00 2001
From: Lukas Masuch
Date: Tue, 4 Feb 2020 22:02:22 +0100
Subject: [PATCH 002/293] Fix jupyterlab build error -> remove chart editor
extension
---
Dockerfile | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Dockerfile b/Dockerfile
index aff1db02..9b517b62 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -734,7 +734,7 @@ RUN \
jupyter labextension install @krassowski/jupyterlab-lsp && \
# For Plotly
jupyter labextension install @jupyterlab/plotly-extension && \
- jupyter labextension install jupyterlab-chart-editor && \
+ # produces build error: jupyter labextension install jupyterlab-chart-editor && \
# For holoview
jupyter labextension install @pyviz/jupyterlab_pyviz && \
# Install jupyterlab variable inspector - https://github.com/lckr/jupyterlab-variableInspector
From d38e98e3665b7869413a8e38714eed1e88cc97b4 Mon Sep 17 00:00:00 2001
From: Lukas Masuch
Date: Wed, 5 Feb 2020 13:13:09 +0100
Subject: [PATCH 003/293] Fix typo
---
Dockerfile | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/Dockerfile b/Dockerfile
index 9b517b62..820c4421 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -844,6 +844,7 @@ RUN \
apt-get update && \
# Newer jedi makes trouble with jupyterlab-lsp
pip install --no-cache-dir jedi==0.15.2 && \
+ apt-get install -y xfce4-clipman && \
# required by rodeo ide (8MB)
# apt-get install -y libgconf2-4 && \
# required for pvporcupine (800kb)
@@ -1066,7 +1067,7 @@ LABEL \
# "org.opencontainers.image.licenses"="Apache-2.0" \
"org.opencontainers.image.version"=$WORKSPACE_VERSION \
"org.opencontainers.image.vendor"="ML Tooling" \
- "org.opencontainers.image.authors"="Lukas Masuch & Benjamin Raehtlein" \
+ "org.opencontainers.image.authors"="Lukas Masuch & Benjamin Raethlein" \
"org.opencontainers.image.revision"=$ARG_VCS_REF \
"org.opencontainers.image.created"=$ARG_BUILD_DATE \
# Label Schema Convention (deprecated): http://label-schema.org/rc1/
From dace51e08e2aaff635562982719ea30dbf599b93 Mon Sep 17 00:00:00 2001
From: Lukas Masuch
Date: Wed, 5 Feb 2020 14:45:55 +0100
Subject: [PATCH 004/293] Add stdout logging to jupyterlab extension builds
---
Dockerfile | 33 ++++++++++++++++++++-------------
1 file changed, 20 insertions(+), 13 deletions(-)
diff --git a/Dockerfile b/Dockerfile
index 820c4421..ce10fbe7 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -699,12 +699,16 @@ RUN \
# install jupyterlab
RUN \
- # Required for jupytext and matplotlib plugins
- jupyter lab build && \
+ # without es6-promise some extension builds fail
+ npm install -g es6-promise && \
+ # define alias command for jupyterlab extension installs with log prints to stdout
+ lab_ext_install='jupyter labextension install -y --debug-log-path=/dev/stdout --log-level=WARN --minimize=False ' && \
# jupyterlab installed in requirements section
- jupyter labextension install @jupyter-widgets/jupyterlab-manager && \
+ $lab_ext_install @jupyter-widgets/jupyterlab-manager && \
# If minimal flavor - do not install jupyterlab extensions
if [ "$WORKSPACE_FLAVOR" = "minimal" ]; then \
+ # Final build with minimization
+ jupyter lab build -y --debug-log-path=/dev/stdout --log-level=WARN && \
# Cleanup
jupyter lab clean && \
jlpm cache clean && \
@@ -712,16 +716,18 @@ RUN \
clean-layer.sh && \
exit 0 ; \
fi && \
- jupyter labextension install @jupyterlab/toc && \
- jupyter labextension install jupyterlab_tensorboard && \
+ $lab_ext_install @jupyterlab/toc && \
+ $lab_ext_install jupyterlab_tensorboard && \
# install jupyterlab git
- jupyter labextension install @jupyterlab/git && \
+ $lab_ext_install @jupyterlab/git && \
pip install jupyterlab-git && \
jupyter serverextension enable --py jupyterlab_git && \
# For Matplotlib: https://github.com/matplotlib/jupyter-matplotlib
- jupyter labextension install jupyter-matplotlib && \
+ $lab_ext_install jupyter-matplotlib && \
# Do not install any other jupyterlab extensions
if [ "$WORKSPACE_FLAVOR" = "light" ]; then \
+ # Final build with minimization
+ jupyter lab build -y --debug-log-path=/dev/stdout --log-level=WARN && \
# Cleanup
jupyter lab clean && \
jlpm cache clean && \
@@ -731,19 +737,20 @@ RUN \
fi && \
# Install jupyterlab language server support
pip install --pre jupyter-lsp && \
- jupyter labextension install @krassowski/jupyterlab-lsp && \
+ $lab_ext_install @krassowski/jupyterlab-lsp && \
# For Plotly
- jupyter labextension install @jupyterlab/plotly-extension && \
+ $lab_ext_install @jupyterlab/plotly-extension && \
# produces build error: jupyter labextension install jupyterlab-chart-editor && \
# For holoview
- jupyter labextension install @pyviz/jupyterlab_pyviz && \
+ $lab_ext_install @pyviz/jupyterlab_pyviz && \
# Install jupyterlab variable inspector - https://github.com/lckr/jupyterlab-variableInspector
- jupyter labextension install @lckr/jupyterlab_variableinspector && \
+ $lab_ext_install @lckr/jupyterlab_variableinspector && \
# Install jupyterlab code formattor - https://github.com/ryantam626/jupyterlab_code_formatter
- jupyter labextension install @ryantam626/jupyterlab_code_formatter && \
+ $lab_ext_install @ryantam626/jupyterlab_code_formatter && \
pip install jupyterlab_code_formatter && \
jupyter serverextension enable --py jupyterlab_code_formatter && \
- jupyter lab build && \
+ # Final build with minimization
+ jupyter lab build -y --debug-log-path=/dev/stdout --log-level=WARN && \
# Cleanup
# Clean jupyter lab cache: https://github.com/jupyterlab/jupyterlab/issues/4930
jupyter lab clean && \
From 4c55a77a9b8ff2afb69458257d60c996fecdc54e Mon Sep 17 00:00:00 2001
From: Lukas Masuch
Date: Wed, 5 Feb 2020 15:48:30 +0100
Subject: [PATCH 005/293] Update readme
---
README.md | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/README.md b/README.md
index 89580f83..d6fdd607 100644
--- a/README.md
+++ b/README.md
@@ -69,7 +69,8 @@ To deploy a single instance for productive usage, we recommend to apply at least
```bash
docker run -d \
-p 8080:8080 \
- --name "ml-workspace" -v "${PWD}:/workspace" \
+ --name "ml-workspace" \
+ -v "${PWD}:/workspace" \
--env AUTHENTICATE_VIA_JUPYTER="mytoken" \
--shm-size 512m \
--restart always \
From 4f8e70d3d6576fe619bb667cd66f0db21af08eb1 Mon Sep 17 00:00:00 2001
From: Lukas Masuch
Date: Mon, 10 Feb 2020 18:04:21 +0100
Subject: [PATCH 006/293] Improve netdata config to be less heavy on resources
---
resources/netdata/netdata.conf | 26 ++++++++++++++++++++++++++
1 file changed, 26 insertions(+)
diff --git a/resources/netdata/netdata.conf b/resources/netdata/netdata.conf
index 4a3e47c5..b4f1a1f5 100644
--- a/resources/netdata/netdata.conf
+++ b/resources/netdata/netdata.conf
@@ -1,9 +1,35 @@
# netdata can generate its own config which is available at 'http:///netdata.conf'
# You can download it with command like: 'wget -O /etc/netdata/netdata.conf http://localhost:19999/netdata.conf'
+# https://github.com/netdata/netdata/blob/master/daemon/config/README.md#global-section-options
+# Imporeve Performance: https://docs.netdata.cloud/docs/performance/
[web]
default port = 8050
+# use dbengine with memory for storing metrics:
+# https://blog.netdata.cloud/posts/db-engine/
+# https://docs.netdata.cloud/docs/tutorials/longer-metrics-storage/
+# https://docs.netdata.cloud/database/engine/#operation
+# https://docs.netdata.cloud/database/#running-a-dedicated-central-netdata-server
+[global]
+ # disable debug and access logs: probably not needed
+ debug log = none
+ access log = none
+ # Set database to dbengine
+ memory mode = dbengine
+ page cache size = 32
+ dbengine disk space = 256
+ # only update every 2 seconds
+ update every = 2
+ # Use memory mode instead?
+ # memory mode = ram
+ # history = 3600
+
+[plugins]
+ # deactivate apps plugin -> consumes lots of memory
+ apps = no
+ # tc = no
+
# Disable registry?
#[registry]
# enabled = yes
From 530652139de15f97578dfbcd25e63fbf0c302bd4 Mon Sep 17 00:00:00 2001
From: Lukas Masuch
Date: Tue, 11 Feb 2020 16:15:27 +0100
Subject: [PATCH 007/293] Add cuda 10.0 installer script
---
resources/tools/cuda-10-0.sh | 38 ++++++++++++++++++++++++++++++++++++
1 file changed, 38 insertions(+)
create mode 100644 resources/tools/cuda-10-0.sh
diff --git a/resources/tools/cuda-10-0.sh b/resources/tools/cuda-10-0.sh
new file mode 100644
index 00000000..ff7ccf9a
--- /dev/null
+++ b/resources/tools/cuda-10-0.sh
@@ -0,0 +1,38 @@
+#!/bin/sh
+
+# Stops script execution if a command has an error
+set -e
+
+INSTALL_ONLY=0
+# Loop through arguments and process them: https://pretzelhands.com/posts/command-line-flags
+for arg in "$@"; do
+ case $arg in
+ -i|--install) INSTALL_ONLY=1 ; shift ;;
+ *) break ;;
+ esac
+done
+
+if [ ! -d "/usr/local/cuda" ]; then
+ echo "Installing CUDA 10.0 runtime. Please wait..."
+ mkdir $RESOURCES_PATH"/cuda-10-0"
+ cd $RESOURCES_PATH"/cuda-10-0"
+ curl -fsSL https://developer.download.nvidia.com/compute/cuda/repos/ubuntu1804/x86_64/7fa2af80.pub | apt-key add -
+ echo "deb https://developer.download.nvidia.com/compute/cuda/repos/ubuntu1804/x86_64 /" > /etc/apt/sources.list.d/cuda.list
+ echo "deb https://developer.download.nvidia.com/compute/machine-learning/repos/ubuntu1804/x86_64 /" > /etc/apt/sources.list.d/nvidia-ml.list
+ apt-get update && apt-get install -y --no-install-recommends cuda-cudart-10-0=10.0.130-1 cuda-compat-10-0
+ ln -s cuda-10.0 /usr/local/cuda
+ apt-get update && apt-get install -y --no-install-recommends cuda-libraries-10-0=10.0.130-1 cuda-nvtx-10-0=10.0.130-1
+ /bin/rm -rf /var/lib/apt/lists/*
+ # libnccl2=2.4.2-1+cuda10.0
+ # cd back otherwise clean layer will fail since it is deleted
+ cd $RESOURCES_PATH
+ rm -r $RESOURCES_PATH"/cuda-10-0"
+else
+ echo "CUDA 10.0 is already installed"
+fi
+
+# Run
+if [ $INSTALL_ONLY = 0 ] ; then
+ echo "Use CUDA 10.0 via supporting libraries and frameworks."
+ sleep 15
+fi
\ No newline at end of file
From 6229044144ef40b72459d386daef64970f4525a2 Mon Sep 17 00:00:00 2001
From: Lukas Masuch
Date: Tue, 11 Feb 2020 16:16:07 +0100
Subject: [PATCH 008/293] Add comment to cuda install script
---
resources/tools/cuda-10-0.sh | 1 +
1 file changed, 1 insertion(+)
diff --git a/resources/tools/cuda-10-0.sh b/resources/tools/cuda-10-0.sh
index ff7ccf9a..b3ce591e 100644
--- a/resources/tools/cuda-10-0.sh
+++ b/resources/tools/cuda-10-0.sh
@@ -16,6 +16,7 @@ if [ ! -d "/usr/local/cuda" ]; then
echo "Installing CUDA 10.0 runtime. Please wait..."
mkdir $RESOURCES_PATH"/cuda-10-0"
cd $RESOURCES_PATH"/cuda-10-0"
+ # Instructions from: https://gitlab.com/nvidia/container-images/cuda/-/tree/ubuntu18.04/10.0
curl -fsSL https://developer.download.nvidia.com/compute/cuda/repos/ubuntu1804/x86_64/7fa2af80.pub | apt-key add -
echo "deb https://developer.download.nvidia.com/compute/cuda/repos/ubuntu1804/x86_64 /" > /etc/apt/sources.list.d/cuda.list
echo "deb https://developer.download.nvidia.com/compute/machine-learning/repos/ubuntu1804/x86_64 /" > /etc/apt/sources.list.d/nvidia-ml.list
From 9c1142674c5c5001abb809579b77e9b8c17df523 Mon Sep 17 00:00:00 2001
From: Lukas Masuch
Date: Tue, 11 Feb 2020 16:52:07 +0100
Subject: [PATCH 009/293] Add problem with SSE4.2 CPU support in documentation
---
README.md | 14 ++++++++++++++
1 file changed, 14 insertions(+)
diff --git a/README.md b/README.md
index d6fdd607..e84bff0f 100644
--- a/README.md
+++ b/README.md
@@ -928,6 +928,20 @@ docker run --shm-size=2G mltooling/ml-workspace:latest
+
+
+Nginx terminates with SIGILL core dumped error (click to expand...)
+
+If you encounter the following error within the container logs when starting the workspace, it will most likely not be possible to run the workspace on your hardware:
+
+```
+exited: nginx (terminated by SIGILL (core dumped); not expected)
+```
+
+The OpenResty/Nginx binary package used within the workspace requires to run on a CPU with `SSE4.2` support (see [this issue](https://github.com/openresty/openresty/issues/267#issuecomment-309296900)). Unfortunatly, some older CPUs do not have support for `SSE4.2` and, therefore, will not be able to run the workspace container. On Linux, you can check if your CPU supports `SSE4.2` when looking into the `cat /proc/cpuinfo` flags section. If you encounter this problem, feel free to notify us by commenting in the following issue: #30.
+
+
+
---
From 4e0964017eda170d019f9c365cf7b64bb0aeedfb Mon Sep 17 00:00:00 2001
From: Lukas Masuch
Date: Tue, 11 Feb 2020 16:58:44 +0100
Subject: [PATCH 010/293] Update readme
---
README.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/README.md b/README.md
index e84bff0f..10eadbb2 100644
--- a/README.md
+++ b/README.md
@@ -938,7 +938,7 @@ If you encounter the following error within the container logs when starting the
exited: nginx (terminated by SIGILL (core dumped); not expected)
```
-The OpenResty/Nginx binary package used within the workspace requires to run on a CPU with `SSE4.2` support (see [this issue](https://github.com/openresty/openresty/issues/267#issuecomment-309296900)). Unfortunatly, some older CPUs do not have support for `SSE4.2` and, therefore, will not be able to run the workspace container. On Linux, you can check if your CPU supports `SSE4.2` when looking into the `cat /proc/cpuinfo` flags section. If you encounter this problem, feel free to notify us by commenting in the following issue: #30.
+The OpenResty/Nginx binary package used within the workspace requires to run on a CPU with `SSE4.2` support (see [this issue](https://github.com/openresty/openresty/issues/267#issuecomment-309296900)). Unfortunately, some older CPUs do not have support for `SSE4.2` and, therefore, will not be able to run the workspace container. On Linux, you can check if your CPU supports `SSE4.2` when looking into the `cat /proc/cpuinfo` flags section. If you encounter this problem, feel free to notify us by commenting on the following issue: [#30](https://github.com/ml-tooling/ml-workspace/issues/30).
From 2e8da5ff0014adf554602afedbfb64d49b7fbe11 Mon Sep 17 00:00:00 2001
From: Lukas Masuch
Date: Thu, 13 Feb 2020 17:24:14 +0100
Subject: [PATCH 011/293] Add tool installer for portainer
---
resources/tools/portainer.sh | 40 ++++++++++++++++++++++++++++++++++++
1 file changed, 40 insertions(+)
create mode 100644 resources/tools/portainer.sh
diff --git a/resources/tools/portainer.sh b/resources/tools/portainer.sh
new file mode 100644
index 00000000..264cda44
--- /dev/null
+++ b/resources/tools/portainer.sh
@@ -0,0 +1,40 @@
+#!/bin/sh
+
+# Stops script execution if a command has an error
+set -e
+
+INSTALL_ONLY=0
+PORT=""
+# Loop through arguments and process them: https://pretzelhands.com/posts/command-line-flags
+for arg in "$@"; do
+ case $arg in
+ -i|--install) INSTALL_ONLY=1 ; shift ;;
+ -p=*|--port=*) PORT="${arg#*=}" ; shift ;; # TODO Does not allow --port 1234
+ *) break ;;
+ esac
+done
+
+if [ ! -f "$RESOURCES_PATH/portainer/portainer" ]; then
+ echo "Installing Portainer. Please wait..."
+ cd $RESOURCES_PATH
+ PORTAINER_VERSION=1.23.0
+ wget https://github.com/portainer/portainer/releases/download/$PORTAINER_VERSION/portainer-$PORTAINER_VERSION-linux-amd64.tar.gz
+ tar xvpfz portainer-$PORTAINER_VERSION-linux-amd64.tar.gz
+ rm ./portainer-$PORTAINER_VERSION-linux-amd64.tar.gz
+else
+ echo "Portainer is already installed"
+fi
+
+# Run
+if [ $INSTALL_ONLY = 0 ] ; then
+ if [ -z "$PORT" ]; then
+ read -p "Please provide a port for starting Portainer: " PORT
+ fi
+
+ echo "Starting Portainer on port "$PORT
+ # Create tool entry for tooling plugin
+ echo '{"id": "portainer-link", "name": "Portainer", "url_path": "/tools/'$PORT'/", "description": "Lightweight management UI for Docker"}' > $HOME/.workspace/tools/embedding-projector.json
+ cd $RESOURCES_PATH/portainer
+ ./portainer --template-file "${PWD}/templates.json" -p :$PORT --no-auth
+ sleep 10
+fi
From f939e1c888e2ae34796e6768ae27a51fbf9c2f51 Mon Sep 17 00:00:00 2001
From: Lukas Masuch
Date: Fri, 14 Feb 2020 04:10:59 +0100
Subject: [PATCH 012/293] Update portainer installer
---
resources/tools/portainer.sh | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/resources/tools/portainer.sh b/resources/tools/portainer.sh
index 264cda44..1fb5989e 100644
--- a/resources/tools/portainer.sh
+++ b/resources/tools/portainer.sh
@@ -21,6 +21,7 @@ if [ ! -f "$RESOURCES_PATH/portainer/portainer" ]; then
wget https://github.com/portainer/portainer/releases/download/$PORTAINER_VERSION/portainer-$PORTAINER_VERSION-linux-amd64.tar.gz
tar xvpfz portainer-$PORTAINER_VERSION-linux-amd64.tar.gz
rm ./portainer-$PORTAINER_VERSION-linux-amd64.tar.gz
+ mkdir $RESOURCES_PATH/portainer/portainer-data
else
echo "Portainer is already installed"
fi
@@ -35,6 +36,6 @@ if [ $INSTALL_ONLY = 0 ] ; then
# Create tool entry for tooling plugin
echo '{"id": "portainer-link", "name": "Portainer", "url_path": "/tools/'$PORT'/", "description": "Lightweight management UI for Docker"}' > $HOME/.workspace/tools/embedding-projector.json
cd $RESOURCES_PATH/portainer
- ./portainer --template-file "${PWD}/templates.json" -p :$PORT --no-auth
+ ./portainer --template-file "${PWD}/templates.json" -p :$PORT --no-auth --data $RESOURCES_PATH/portainer/portainer-data
sleep 10
fi
From 9638bee01636abe945235af4c7172d52bbf7b7ab Mon Sep 17 00:00:00 2001
From: Lukas Masuch
Date: Fri, 14 Feb 2020 04:12:38 +0100
Subject: [PATCH 013/293] Add support for soft limits on workspace folder
memory and container size
---
.../tooling-notebook-widget.js | 2 +
.../tooling-shared-components.js | 79 ++
.../jupyter_tooling/tooling-tree-widget.js | 2 +
.../jupyter_tooling/tooling_handler.py | 759 ++++++++++++++----
4 files changed, 683 insertions(+), 159 deletions(-)
diff --git a/resources/jupyter/extensions/tooling-extension/jupyter_tooling/tooling-notebook-widget.js b/resources/jupyter/extensions/tooling-extension/jupyter_tooling/tooling-notebook-widget.js
index 8775c4cd..4c3b3e61 100644
--- a/resources/jupyter/extensions/tooling-extension/jupyter_tooling/tooling-notebook-widget.js
+++ b/resources/jupyter/extensions/tooling-extension/jupyter_tooling/tooling-notebook-widget.js
@@ -43,6 +43,8 @@ define(['base/js/namespace', 'jquery', 'base/js/dialog', 'base/js/utils', 'requi
// add button for new action
Jupyter.toolbar.add_buttons_group([Jupyter.actions.register(git_helper, 'commit_push', 'notebook')])
Jupyter.toolbar.add_buttons_group([Jupyter.actions.register(share_notebook, 'share_notebook', 'notebook')])
+
+ components.checkDiskStorage()
}
//Loads the extension
diff --git a/resources/jupyter/extensions/tooling-extension/jupyter_tooling/tooling-shared-components.js b/resources/jupyter/extensions/tooling-extension/jupyter_tooling/tooling-shared-components.js
index 36253961..f85fdfe4 100644
--- a/resources/jupyter/extensions/tooling-extension/jupyter_tooling/tooling-shared-components.js
+++ b/resources/jupyter/extensions/tooling-extension/jupyter_tooling/tooling-shared-components.js
@@ -467,6 +467,85 @@ define(['base/js/namespace', 'jquery', 'base/js/dialog', 'require', 'exports', '
});
}
+ checkDiskStorage() {
+ $.ajaxSetup(this.ajaxCookieTokenHandling());
+ var that = this;
+ var settings = {
+ url: basePath + 'tooling/storage/check',
+ processData: false,
+ type: "GET",
+ success: function (data) {
+ data = JSON.parse(data)
+ if (data["workspaceFolderSizeWarning"] || data["containerSizeWarning"]) {
+ // open warning dialog if either container size or workspace folder size shows warning
+ that.openDiskStorageWarningDialog(data)
+ }
+ },
+ error: function (response) {
+ let errorMsg = "An unknown error occurred while checking disk storage.";
+ if (response && response.responseText) {
+ try {
+ let data = JSON.parse(response.responseText)
+ if (Boolean(data["error"])) {
+ errorMsg = data["error"];
+ }
+ } catch (e) {
+ errorMsg = String(response.responseText)
+ }
+ }
+ that.openErrorDialog(errorMsg, null);
+ }
+ }
+ $.ajax(settings)
+ }
+
+ storageWarningDialog(data) {
+
+ var div = $('');
+
+ var warning_div = "";
+ if (data["workspaceFolderSizeWarning"]) {
+ warning_div += '
Size of your /workspace folder: ' + data["workspaceFolderSize"] + ' GB / '+ data["workspaceFolderSizeLimit"] + ' GB
';
+ warning_div += '
You have exceeded the limit of available disk storage assigned to your /workspace folder (your working directory). Please delete unnecessary files and folders from the /workspace folder.
Size of your workspace container: ' + data["containerSize"] + ' GB / '+ data["containerSizeLimit"] + ' GB
';
+ warning_div += '
You have exceeded the limit of available disk storage assigned to your workspace container. Usually, this includes everything stored outside of the /workspace folder (working directory). Your workspace container might be automatically reset, in case you do not clean up your container storage. This container reset will remove all files outside of the /workspace folder.
';
+ }
+ div.append('
'+warning_div+"
");
+ div.append('
To find the largest files and directories, we recommend to use the terminal with the following command: ncdu / the Disk Usage Analyzer application accessible from Applications -> System within the VNC Desktop. Alternatively, you can also use
');
+ return div
+ }
+
+ openDiskStorageWarningDialog(data, successCallback) {
+ var that = this;
+
+ dialog.modal({
+ title: 'DISK STORAGE WARNING',
+ body: that.storageWarningDialog(data),
+ keyboard_manager: Jupyter.keyboard_manager,
+ sanitize: false,
+ buttons: {
+ 'Open VNC for Clean-up': {
+ click: function () {
+ // Open VNC
+ window.open(basePath + "tools/vnc/?password=vncpassword", '_blank');
+ }
+ },
+ 'Open Terminal for Clean-up': {
+ class: "btn-danger",
+ //class: "btn-primary",
+ click: function () {
+ // TODO: Copy cleanup command to clipboard?
+ // Open Terminal
+ window.open(basePath + "terminals/cleanup", '_blank')
+ }
+ }
+ }
+ })
+ }
+
/**
* @param {list} contains email and name
* @return {string} The html code to configure the git.name and the git.email of the git user
diff --git a/resources/jupyter/extensions/tooling-extension/jupyter_tooling/tooling-tree-widget.js b/resources/jupyter/extensions/tooling-extension/jupyter_tooling/tooling-tree-widget.js
index 76949a92..f42e0d79 100644
--- a/resources/jupyter/extensions/tooling-extension/jupyter_tooling/tooling-tree-widget.js
+++ b/resources/jupyter/extensions/tooling-extension/jupyter_tooling/tooling-tree-widget.js
@@ -99,6 +99,8 @@ define(['base/js/namespace', 'jquery', 'base/js/dialog', 'base/js/utils', 'requi
}
};
Jupyter.notebook_list._selection_changed();
+
+ components.checkDiskStorage()
}
// Loads the extension
diff --git a/resources/jupyter/extensions/tooling-extension/jupyter_tooling/tooling_handler.py b/resources/jupyter/extensions/tooling-extension/jupyter_tooling/tooling_handler.py
index 729c9140..cd5304cc 100644
--- a/resources/jupyter/extensions/tooling-extension/jupyter_tooling/tooling_handler.py
+++ b/resources/jupyter/extensions/tooling-extension/jupyter_tooling/tooling_handler.py
@@ -1,37 +1,55 @@
-import json
import glob
+import json
import os
import subprocess
-from subprocess import call
-
-try:
- from urllib.parse import unquote
-except ImportError:
- from urllib import unquote
-
+import threading
+import time
import warnings
from datetime import datetime
+from subprocess import call
import git
import tornado
from notebook.base.handlers import IPythonHandler
-from notebook .utils import url_path_join
+from notebook.utils import url_path_join
from tornado import web
+try:
+ from urllib.parse import unquote
+except ImportError:
+ from urllib import unquote
+
+
SHARED_SSH_SETUP_PATH = "/shared/ssh/setup"
HOME = os.getenv("HOME", "/root")
RESOURCES_PATH = os.getenv("RESOURCES_PATH", "/resources")
+WORKSPACE_HOME = os.getenv("WORKSPACE_HOME", "/workspace")
+WORKSPACE_CONFIG_FOLDER = os.path.join(HOME, ".workspace")
+
+MAX_WORKSPACE_FOLDER_SIZE = os.getenv("MAX_WORKSPACE_FOLDER_SIZE", None)
+if MAX_WORKSPACE_FOLDER_SIZE and MAX_WORKSPACE_FOLDER_SIZE.isnumeric():
+ MAX_WORKSPACE_FOLDER_SIZE = int(MAX_WORKSPACE_FOLDER_SIZE)
+else:
+ MAX_WORKSPACE_FOLDER_SIZE = None
+
+MAX_CONTAINER_SIZE = os.getenv("MAX_CONTAINER_SIZE", None)
+if MAX_CONTAINER_SIZE and MAX_CONTAINER_SIZE.isnumeric():
+ MAX_CONTAINER_SIZE = int(MAX_CONTAINER_SIZE)
+else:
+ MAX_CONTAINER_SIZE = None
+
# -------------- HANDLER -------------------------
-class HelloWorldHandler(IPythonHandler):
+
+class HelloWorldHandler(IPythonHandler):
def data_received(self, chunk):
pass
def get(self):
result = self.request.protocol + "://" + self.request.host
- if 'base_url' in self.application.settings:
- result = result + " " + self.application.settings['base_url']
+ if "base_url" in self.application.settings:
+ result = result + " " + self.application.settings["base_url"]
self.finish(result)
@@ -47,9 +65,7 @@ def handle_error(handler, status_code: int, error_msg: str = None, exception=Non
error_msg += str(exception)
- error = {
- "error": error_msg
- }
+ error = {"error": error_msg}
handler.finish(json.dumps(error))
log.info("An error occurred (" + str(status_code) + "): " + error_msg)
@@ -58,45 +74,54 @@ def handle_error(handler, status_code: int, error_msg: str = None, exception=Non
def send_data(handler, data):
handler.finish(json.dumps(data, sort_keys=True, indent=4))
-class PingHandler(IPythonHandler):
+class PingHandler(IPythonHandler):
@web.authenticated
def get(self):
# Used by Jupyterhub to test if user cookies are valid
self.finish("Successful")
-class InstallToolHandler(IPythonHandler):
+class InstallToolHandler(IPythonHandler):
@web.authenticated
def get(self):
try:
- workspace_installer_folder = RESOURCES_PATH + '/tools/'
+ workspace_installer_folder = RESOURCES_PATH + "/tools/"
workspace_tool_installers = []
-
+
# sort entries by name
- for f in sorted(glob.glob(os.path.join(workspace_installer_folder, '*.sh'))):
+ for f in sorted(
+ glob.glob(os.path.join(workspace_installer_folder, "*.sh"))
+ ):
tool_name = os.path.splitext(os.path.basename(f))[0].strip()
- workspace_tool_installers.append({
- "name": tool_name,
- "command": "/bin/bash " + f})
-
+ workspace_tool_installers.append(
+ {"name": tool_name, "command": "/bin/bash " + f}
+ )
+
if not workspace_tool_installers:
- log.warn("No workspace tool installers found at path: " + workspace_installer_folder)
+ log.warn(
+ "No workspace tool installers found at path: "
+ + workspace_installer_folder
+ )
# Backup if file does not exist
- workspace_tool_installers.append({
- "name": "none",
- "command": "No workspace tool installers found at path: " + workspace_installer_folder})
+ workspace_tool_installers.append(
+ {
+ "name": "none",
+ "command": "No workspace tool installers found at path: "
+ + workspace_installer_folder,
+ }
+ )
self.finish(json.dumps(workspace_tool_installers))
except Exception as ex:
handle_error(self, 500, exception=ex)
return
-class ToolingHandler(IPythonHandler):
+class ToolingHandler(IPythonHandler):
@web.authenticated
def get(self):
try:
- workspace_tooling_folder = HOME + '/.workspace/tools/'
+ workspace_tooling_folder = HOME + "/.workspace/tools/"
workspace_tools = []
def tool_is_duplicated(tool_array, tool):
@@ -105,9 +130,11 @@ def tool_is_duplicated(tool_array, tool):
if "id" in t and "id" in tool and tool["id"] == t["id"]:
return True
return False
-
+
# sort entries by name
- for f in sorted(glob.glob(os.path.join(workspace_tooling_folder, '*.json'))):
+ for f in sorted(
+ glob.glob(os.path.join(workspace_tooling_folder, "*.json"))
+ ):
try:
with open(f, "rb") as tool_file:
tool_data = json.load(tool_file)
@@ -121,30 +148,38 @@ def tool_is_duplicated(tool_array, tool):
for tool in tool_data:
if not tool_is_duplicated(workspace_tools, tool):
workspace_tools.append(tool)
- except:
+ except Exception:
log.warn("Failed to load tools file: " + f.name)
continue
if not workspace_tools:
- log.warn("No workspace tools found at path: " + workspace_tooling_folder)
+ log.warn(
+ "No workspace tools found at path: " + workspace_tooling_folder
+ )
# Backup if file does not exist
- workspace_tools.append({"id": "vnc-link",
- "name": "VNC",
- "url_path": "/tools/vnc/?password=vncpassword",
- "description": "Desktop GUI for the workspace"})
+ workspace_tools.append(
+ {
+ "id": "vnc-link",
+ "name": "VNC",
+ "url_path": "/tools/vnc/?password=vncpassword",
+ "description": "Desktop GUI for the workspace",
+ }
+ )
self.finish(json.dumps(workspace_tools))
except Exception as ex:
handle_error(self, 500, exception=ex)
return
-class GitCommitHandler(IPythonHandler):
+class GitCommitHandler(IPythonHandler):
@web.authenticated
def post(self):
data = self.get_json_body()
if data is None:
- handle_error(self, 400, "Please provide a valid file path and commit msg in body.")
+ handle_error(
+ self, 400, "Please provide a valid file path and commit msg in body."
+ )
return
if "filePath" not in data or not data["filePath"]:
@@ -165,11 +200,10 @@ def post(self):
class GitInfoHandler(IPythonHandler):
-
@web.authenticated
def get(self):
try:
- path = _resolve_path(self.get_argument('path', None))
+ path = _resolve_path(self.get_argument("path", None))
send_data(self, get_git_info(path))
except Exception as ex:
handle_error(self, 500, exception=ex)
@@ -178,7 +212,7 @@ def get(self):
@web.authenticated
def post(self):
- path = _resolve_path(self.get_argument('path', None))
+ path = _resolve_path(self.get_argument("path", None))
data = self.get_json_body()
if data is None:
@@ -205,8 +239,8 @@ def post(self):
handle_error(self, 500, exception=ex)
return
-class SSHScriptHandler(IPythonHandler):
+class SSHScriptHandler(IPythonHandler):
@web.authenticated
def get(self):
try:
@@ -215,16 +249,21 @@ def get(self):
handle_error(self, 500, exception=ex)
return
+
class SharedSSHHandler(IPythonHandler):
- def get(self):
+ def get(self):
# authentication only via token
try:
sharing_enabled = os.environ.get("SHARED_LINKS_ENABLED", "false")
if sharing_enabled.lower() != "true":
- handle_error(self, 401, error_msg="Shared links are disabled. Please download and execute the SSH script manually.")
+ handle_error(
+ self,
+ 401,
+ error_msg="Shared links are disabled. Please download and execute the SSH script manually.",
+ )
return
-
- token = self.get_argument('token', None)
+
+ token = self.get_argument("token", None)
valid_token = generate_token(self.request.path)
if not token:
self.set_status(401)
@@ -234,42 +273,55 @@ def get(self):
self.set_status(401)
self.finish('echo "The provided token is not valid."')
return
-
+
handle_ssh_script_request(self)
except Exception as ex:
handle_error(self, 500, exception=ex)
return
-class SSHCommandHandler(IPythonHandler):
+class SSHCommandHandler(IPythonHandler):
@web.authenticated
def get(self):
try:
sharing_enabled = os.environ.get("SHARED_LINKS_ENABLED", "false")
if sharing_enabled.lower() != "true":
- self.finish("Shared links are disabled. Please download and executen the SSH script manually.")
+ self.finish(
+ "Shared links are disabled. Please download and executen the SSH script manually."
+ )
return
-
+
# schema + host + port
- origin = self.get_argument('origin', None)
+ origin = self.get_argument("origin", None)
if not origin:
- handle_error(self, 400, "Please provide a valid origin (endpoint url) via get parameter.")
+ handle_error(
+ self,
+ 400,
+ "Please provide a valid origin (endpoint url) via get parameter.",
+ )
return
-
+
host, port = parse_endpoint_origin(origin)
- base_url = web_app.settings['base_url'].rstrip("/") + SHARED_SSH_SETUP_PATH
- setup_command = '/bin/bash <(curl -s --insecure "' \
- + origin + base_url \
- + "?token=" + generate_token(base_url) \
- + "&host=" + host \
- + "&port=" + port \
- + '")'
-
+ base_url = web_app.settings["base_url"].rstrip("/") + SHARED_SSH_SETUP_PATH
+ setup_command = (
+ '/bin/bash <(curl -s --insecure "'
+ + origin
+ + base_url
+ + "?token="
+ + generate_token(base_url)
+ + "&host="
+ + host
+ + "&port="
+ + port
+ + '")'
+ )
+
self.finish(setup_command)
except Exception as ex:
handle_error(self, 500, exception=ex)
return
+
class SharedTokenHandler(IPythonHandler):
@web.authenticated
def get(self):
@@ -278,12 +330,14 @@ def get(self):
if sharing_enabled.lower() != "true":
handle_error(self, 400, error_msg="Shared links are disabled.")
return
-
- path = self.get_argument('path', None)
+
+ path = self.get_argument("path", None)
if path is None:
- handle_error(self, 400, "Please provide a valid path via get parameter.")
+ handle_error(
+ self, 400, "Please provide a valid path via get parameter."
+ )
return
-
+
self.finish(generate_token(path))
except Exception as ex:
handle_error(self, 500, exception=ex)
@@ -296,24 +350,34 @@ def get(self):
try:
sharing_enabled = os.environ.get("SHARED_LINKS_ENABLED", "false")
if sharing_enabled.lower() != "true":
- self.finish("Shared links are disabled. Please download and share the data manually.")
+ self.finish(
+ "Shared links are disabled. Please download and share the data manually."
+ )
return
-
- path = _resolve_path(self.get_argument('path', None))
+
+ path = _resolve_path(self.get_argument("path", None))
if not path:
- handle_error(self, 400, "Please provide a valid path via get parameter.")
+ handle_error(
+ self, 400, "Please provide a valid path via get parameter."
+ )
return
-
+
if not os.path.exists(path):
- handle_error(self, 400, "The selected file or folder does not exist: " + path)
+ handle_error(
+ self, 400, "The selected file or folder does not exist: " + path
+ )
return
-
+
# schema + host + port
- origin = self.get_argument('origin', None)
+ origin = self.get_argument("origin", None)
if not origin:
- handle_error(self, 400, "Please provide a valid origin (endpoint url) via get parameter.")
+ handle_error(
+ self,
+ 400,
+ "Please provide a valid origin (endpoint url) via get parameter.",
+ )
return
-
+
token = generate_token(path)
try:
@@ -321,19 +385,28 @@ def get(self):
call("supervisorctl stop filebrowser", shell=True)
# Add new user with the given permissions and scope
- add_user_command = "filebrowser users add " + token + " " + token \
- + " --perm.admin=false --perm.create=false --perm.delete=false" \
- + " --perm.download=true --perm.execute=false --perm.modify=false" \
- + " --perm.rename=false --perm.share=false --lockPassword=true" \
- + " --database=" + HOME + "/filebrowser.db --scope=\"" + path + "\""
+ add_user_command = (
+ "filebrowser users add "
+ + token
+ + " "
+ + token
+ + " --perm.admin=false --perm.create=false --perm.delete=false"
+ + " --perm.download=true --perm.execute=false --perm.modify=false"
+ + " --perm.rename=false --perm.share=false --lockPassword=true"
+ + " --database="
+ + HOME
+ + '/filebrowser.db --scope="'
+ + path
+ + '"'
+ )
call(add_user_command, shell=True)
- except:
+ except Exception:
pass
-
+
call("supervisorctl start filebrowser", shell=True)
- base_url = web_app.settings['base_url'].rstrip("/") + "/shared/filebrowser/"
+ base_url = web_app.settings["base_url"].rstrip("/") + "/shared/filebrowser/"
setup_command = origin + base_url + "?token=" + token
self.finish(setup_command)
@@ -342,10 +415,318 @@ def get(self):
return
+class StorageCheckHandler(IPythonHandler):
+ @web.authenticated
+ def get(self) -> None:
+ try:
+ CHECK_INTERVAL_MINUTES = 5
+
+ result = {
+ "workspaceFolderSizeWarning": False,
+ "containerSizeWarning": False
+ }
+
+ if not MAX_WORKSPACE_FOLDER_SIZE and not MAX_CONTAINER_SIZE:
+ self.finish(json.dumps(result))
+ return
+
+ minutes_since_update = get_minutes_since_size_update()
+ if minutes_since_update is not None and minutes_since_update < CHECK_INTERVAL_MINUTES:
+ # only run check every 5 minutes
+ self.finish(json.dumps(result))
+ return
+
+ # Only run update every two minutes
+ # run update in background -> somtimes it might need to much time to run
+
+ thread = threading.Thread(target=update_workspace_metadata)
+ thread.daemon = True
+ thread.start()
+
+ container_size_in_gb = get_container_size()
+
+ if MAX_CONTAINER_SIZE:
+ if container_size_in_gb > MAX_CONTAINER_SIZE:
+ # Wait for metadata update before showing the warning
+ # sleep 50 ms -> metadata file should have been updated, otherwise use old metadata
+ time.sleep(0.050)
+ container_size_in_gb = get_container_size()
+ result["containerSize"] = round(container_size_in_gb, 1)
+ result["containerSizeLimit"] = round(MAX_CONTAINER_SIZE)
+
+ if container_size_in_gb > MAX_CONTAINER_SIZE:
+ # Still bigger after update -> show the warning
+ result["containerSizeWarning"] = True
+ log.info(
+ "You have exceeded the limit the container size. Please clean up."
+ )
+ else:
+ result["containerSizeWarning"] = False
+
+ workspace_folder_size_in_gb = get_workspace_folder_size()
+
+ if MAX_WORKSPACE_FOLDER_SIZE:
+ if workspace_folder_size_in_gb > MAX_WORKSPACE_FOLDER_SIZE:
+ # Wait for metadata update before showing the warning
+ # sleep 50 ms -> metadata file should have been updated, otherwise use old metadata
+ time.sleep(0.050)
+ workspace_folder_size_in_gb = get_workspace_folder_size()
+ result["workspaceFolderSize"] = round(workspace_folder_size_in_gb, 1)
+ result["workspaceFolderSizeLimit"] = round(MAX_WORKSPACE_FOLDER_SIZE)
+
+ if workspace_folder_size_in_gb > MAX_WORKSPACE_FOLDER_SIZE:
+ # Still bigger after update -> show the warning
+ result["workspaceFolderSizeWarning"] = True
+ log.info(
+ "You have exceeded the limit the workspace folder size. Please clean up."
+ )
+ else:
+ result["workspaceFolderSizeWarning"] = False
+
+ self.finish(json.dumps(result))
+
+ except Exception as ex:
+ handle_error(self, 500, exception=ex)
+ return
+
+
+# ------------- Storage Check Utils ------------------------
+
+
+def get_last_usage_date(path):
+ date = None
+
+ if not os.path.exists(path):
+ log.info("Path does not exist: " + path)
+ return date
+
+ try:
+ date = datetime.fromtimestamp(os.path.getmtime(path))
+ except Exception:
+ pass
+
+ try:
+ compare_date = datetime.fromtimestamp(os.path.getatime(path))
+ if date.date() < compare_date.date():
+ # compare date is newer
+ date = compare_date
+ except Exception:
+ pass
+
+ try:
+ compare_date = datetime.fromtimestamp(os.path.getctime(path))
+ if date.date() < compare_date.date():
+ # compare date is newer
+ date = compare_date
+ except Exception:
+ pass
+
+ return date
+
+
+def update_workspace_metadata():
+ workspace_metadata = {
+ "update_timestamp": str(datetime.now()),
+ "container_size_in_kb": None,
+ "workspace_folder_size_in_kb": None
+ }
+
+ if MAX_CONTAINER_SIZE:
+ # calculate container size via the root folder
+ try:
+ # exclude all different filesystems/mounts
+ workspace_metadata["container_size_in_kb"] = int(
+ subprocess.check_output(["du", "-sx", "--exclude=/proc", "/"]).split()[0].decode("utf-8")
+ )
+ except Exception:
+ pass
+
+ if MAX_WORKSPACE_FOLDER_SIZE:
+ # calculate workspace folder size
+ try:
+ # exclude all different filesystems/mounts
+ workspace_metadata["workspace_folder_size_in_kb"] = int(
+ subprocess.check_output(["du", "-sx", WORKSPACE_HOME]).split()[0].decode("utf-8")
+ )
+ except Exception:
+ pass
+
+ if not os.path.exists(WORKSPACE_CONFIG_FOLDER):
+ os.makedirs(WORKSPACE_CONFIG_FOLDER)
+
+ with open(os.path.join(WORKSPACE_CONFIG_FOLDER, "metadata.json"), "w") as file:
+ json.dump(workspace_metadata, file, sort_keys=True, indent=4)
+
+
+def get_workspace_metadata():
+ workspace_metadata = {}
+ metadata_file_path = os.path.join(WORKSPACE_CONFIG_FOLDER, "metadata.json")
+ if os.path.isfile(metadata_file_path):
+ try:
+ with open(metadata_file_path, "rb") as file:
+ workspace_metadata = json.load(file)
+ except Exception:
+ pass
+ return workspace_metadata
+
+
+def get_container_size():
+ try:
+ workspace_metadata = get_workspace_metadata()
+ return int(workspace_metadata["container_size_in_kb"]) / 1024 / 1024
+ except Exception:
+ return 0
+
+
+def get_workspace_folder_size():
+ try:
+ workspace_metadata = get_workspace_metadata()
+ return int(workspace_metadata["workspace_folder_size_in_kb"]) / 1024 / 1024
+ except Exception:
+ return 0
+
+
+def get_minutes_since_size_update():
+ metadata_file_path = os.path.join(WORKSPACE_CONFIG_FOLDER, "metadata.json")
+ if os.path.isfile(metadata_file_path):
+ try:
+ with open(metadata_file_path, "rb") as file:
+ workspace_metadata = json.load(file)
+ update_timestamp_str = workspace_metadata["update_timestamp"]
+
+ if not update_timestamp_str:
+ return None
+ updated_date = datetime.strptime(
+ update_timestamp_str, "%Y-%m-%d %H:%M:%S.%f"
+ )
+ return ((datetime.now() - updated_date).seconds//60)%60
+ except Exception as ex:
+ return None
+ return None
+
+def get_inactive_days():
+ # read inactive days from metadata timestamp (update when user is actively using the workspace)
+ metadata_file_path = os.path.join(WORKSPACE_CONFIG_FOLDER, "metadata.json")
+ if os.path.isfile(metadata_file_path):
+ try:
+ with open(metadata_file_path, "rb") as file:
+ workspace_metadata = json.load(file)
+ update_timestamp_str = workspace_metadata["update_timestamp"]
+
+ if not update_timestamp_str:
+ return 0
+ updated_date = datetime.strptime(
+ update_timestamp_str, "%Y-%m-%d %H:%M:%S.%f"
+ )
+ inactive_days = (datetime.now() - updated_date).days
+ return inactive_days
+ except Exception:
+ return 0
+ # return 0 as fallback
+ return 0
+
+
+def cleanup_folder(
+ folder_path: str,
+ max_file_size_mb: int = 50,
+ last_file_usage: int = 3,
+ replace_with_info: bool = True,
+ excluded_folders: list = None,
+):
+ """
+ Cleanup folder to reduce disk space usage.
+ # Arguments
+ folder_path (str): Folder that should be cleaned.
+ max_file_size_mb (int): Max size of files in MB that should be deleted. Default: 50.
+ replace_with_info (bool): Replace removed files with `.removed.txt` files with file removal reason. Default: True.
+ last_file_usage (int): Number of days a file wasn't used to allow the file to be removed. Default: 3.
+ excluded_folders (list[str]): List of folders to exclude from removal (optional)
+ """
+ total_cleaned_up_mb = 0
+ removed_files = 0
+
+ for dirname, subdirs, files in os.walk(folder_path):
+ if excluded_folders:
+ for excluded_folder in excluded_folders:
+ if excluded_folder in subdirs:
+ log.debug("Ignoring folder because of name: " + excluded_folder)
+ subdirs.remove(excluded_folder)
+ for filename in files:
+ file_path = os.path.join(dirname, filename)
+
+ file_size_mb = int(os.path.getsize(file_path) / (1024.0 * 1024.0))
+ if max_file_size_mb and max_file_size_mb > file_size_mb:
+ # File will not be deleted since it is less than the max size
+ continue
+
+ last_file_usage_days = None
+ if get_last_usage_date(file_path):
+ last_file_usage_days = (
+ datetime.now() - get_last_usage_date(file_path)
+ ).days
+
+ if last_file_usage_days and last_file_usage_days <= last_file_usage:
+ continue
+
+ current_date_str = datetime.now().strftime("%B %d, %Y")
+ removal_reason = (
+ "File has been removed during folder cleaning ("
+ + folder_path
+ + ") on "
+ + current_date_str
+ + ". "
+ )
+ if file_size_mb and max_file_size_mb:
+ removal_reason += (
+ "The file size was "
+ + str(file_size_mb)
+ + " MB (max "
+ + str(max_file_size_mb)
+ + "). "
+ )
+
+ if last_file_usage_days and last_file_usage:
+ removal_reason += (
+ "The last usage was "
+ + str(last_file_usage_days)
+ + " days ago (max "
+ + str(last_file_usage)
+ + "). "
+ )
+
+ log.info(filename + ": " + removal_reason)
+
+ # Remove file
+ try:
+ os.remove(file_path)
+
+ if replace_with_info:
+ with open(file_path + ".removed.txt", "w") as file:
+ file.write(removal_reason)
+
+ if file_size_mb:
+ total_cleaned_up_mb += file_size_mb
+
+ removed_files += 1
+
+ except Exception as e:
+ log.info("Failed to remove file: " + file_path, e)
+
+ # check diskspace and update workspace metadata
+ update_workspace_metadata()
+ log.info(
+ "Finished cleaning. Removed "
+ + str(removed_files)
+ + " files with a total disk space of "
+ + str(total_cleaned_up_mb)
+ + " MB."
+ )
+
# ------------- GIT FUNCTIONS ------------------------
+
def execute_command(cmd: str):
- return subprocess.check_output(cmd.split()).decode('utf-8').replace("\n", "")
+ return subprocess.check_output(cmd.split()).decode("utf-8").replace("\n", "")
def get_repo(directory: str):
@@ -354,7 +735,7 @@ def get_repo(directory: str):
try:
return git.Repo(directory, search_parent_directories=True)
- except:
+ except Exception:
return None
@@ -362,7 +743,9 @@ def set_user_email(email: str, repo=None):
if repo:
repo.config_writer().set_value("user", "email", email).release()
else:
- exit_code = subprocess.call('git config --global user.email "' + email + '"', shell=True)
+ exit_code = subprocess.call(
+ 'git config --global user.email "' + email + '"', shell=True
+ )
if exit_code > 0:
warnings.warn("Global email configuration failed.")
@@ -371,7 +754,9 @@ def set_user_name(name: str, repo=None):
if repo:
repo.config_writer().set_value("user", "name", name).release()
else:
- exit_code = subprocess.call('git config --global user.name "' + name + '"', shell=True)
+ exit_code = subprocess.call(
+ 'git config --global user.name "' + name + '"', shell=True
+ )
if exit_code > 0:
warnings.warn("Global name configuration failed.")
@@ -388,19 +773,22 @@ def commit_file(file_path: str, commit_msg: str = None, push: bool = True):
repo.index.add([file_path])
if not get_user_name(repo):
- raise Exception('Cannot push to remote. Please specify a name with: git config --global user.name "YOUR NAME"')
+ raise Exception(
+ 'Cannot push to remote. Please specify a name with: git config --global user.name "YOUR NAME"'
+ )
if not get_user_email(repo):
raise Exception(
- 'Cannot push to remote. Please specify an email with: git config --global user.emails "YOUR EMAIL"')
+ 'Cannot push to remote. Please specify an email with: git config --global user.emails "YOUR EMAIL"'
+ )
if not commit_msg:
commit_msg = "Updated " + os.path.relpath(file_path, repo.working_dir)
try:
# fetch and merge newest state - fast-forward-only
- repo.git.pull('--ff-only')
- except:
+ repo.git.pull("--ff-only")
+ except Exception:
raise Exception("The repo is not up-to-date or cannot be updated.")
try:
@@ -408,7 +796,9 @@ def commit_file(file_path: str, commit_msg: str = None, push: bool = True):
repo.git.commit(file_path, m=commit_msg)
except git.GitCommandError as error:
if error.stdout and (
- "branch is up-to-date with" in error.stdout or "branch is up to date with" in error.stdout):
+ "branch is up-to-date with" in error.stdout
+ or "branch is up to date with" in error.stdout
+ ):
# TODO better way to check if file has changed, e.g. has_file_changed
raise Exception("File has not been changed: " + file_path)
else:
@@ -417,20 +807,26 @@ def commit_file(file_path: str, commit_msg: str = None, push: bool = True):
if push:
# Push file to remote
try:
- repo.git.push("origin", 'HEAD')
+ repo.git.push("origin", "HEAD")
except git.GitCommandError as error:
- if error.stderr and ( "No such device or address" in error.stderr and "could not read Username" in error.stderr):
- raise Exception("User is not authenticated. Please use Ungit to login via HTTPS or use SSH authentication.")
+ if error.stderr and (
+ "No such device or address" in error.stderr
+ and "could not read Username" in error.stderr
+ ):
+ raise Exception(
+ "User is not authenticated. Please use Ungit to login via HTTPS or use SSH authentication."
+ )
else:
raise error
+
def get_config_value(key: str, repo=None):
try:
if repo:
return repo.git.config(key)
# no repo, look up global config
- return execute_command('git config ' + key)
- except:
+ return execute_command("git config " + key)
+ except Exception:
return None
@@ -445,21 +841,25 @@ def get_user_email(repo=None):
def get_active_branch(repo) -> str or None:
try:
return repo.active_branch.name
- except:
+ except Exception:
return None
def get_last_commit(repo) -> str or None:
try:
- return datetime.fromtimestamp(repo.head.commit.committed_date).strftime("%d.%B %Y %I:%M:%S")
- except:
+ return datetime.fromtimestamp(repo.head.commit.committed_date).strftime(
+ "%d.%B %Y %I:%M:%S"
+ )
+ except Exception:
return None
def has_file_changed(repo, file_path: str):
# not working in all situations
changed_files = [item.a_path for item in repo.index.diff(None)]
- return os.path.relpath(os.path.realpath(file_path), repo.working_dir) in (path for path in changed_files)
+ return os.path.relpath(os.path.realpath(file_path), repo.working_dir) in (
+ path for path in changed_files
+ )
def get_git_info(directory: str):
@@ -470,13 +870,13 @@ def get_git_info(directory: str):
"repoRoot": repo.working_dir if repo else None,
"activeBranch": get_active_branch(repo) if repo else None,
"lastCommit": get_last_commit(repo) if repo else None,
- "requestPath": directory
+ "requestPath": directory,
}
return git_info
def _get_server_root() -> str:
- return os.path.expanduser(web_app.settings['server_root_dir'])
+ return os.path.expanduser(web_app.settings["server_root_dir"])
def _resolve_path(path: str) -> str or None:
@@ -492,48 +892,59 @@ def _resolve_path(path: str) -> str or None:
# ------------- SSH Functions ------------------------
def handle_ssh_script_request(handler):
- origin = handler.get_argument('origin', None)
- host = handler.get_argument('host', None)
- port = handler.get_argument('port', None)
+ origin = handler.get_argument("origin", None)
+ host = handler.get_argument("host", None)
+ port = handler.get_argument("port", None)
if not host and origin:
host, _ = parse_endpoint_origin(origin)
-
+
if not port and origin:
_, port = parse_endpoint_origin(origin)
-
+
if not host:
- handle_error(handler, 400, "Please provide a host via get parameter. Alternatively, you can also specify an origin with the full endpoint url.")
+ handle_error(
+ handler,
+ 400,
+ "Please provide a host via get parameter. Alternatively, you can also specify an origin with the full endpoint url.",
+ )
return
if not port:
- handle_error(handler, 400, "Please provide a port via get parameter. Alternatively, you can also specify an origin with the full endpoint url.")
- return
-
+ handle_error(
+ handler,
+ 400,
+ "Please provide a port via get parameter. Alternatively, you can also specify an origin with the full endpoint url.",
+ )
+ return
+
setup_script = get_setup_script(host, port)
- download_script_flag = handler.get_argument('download', None)
- if download_script_flag and download_script_flag.lower().strip() == 'true':
+ download_script_flag = handler.get_argument("download", None)
+ if download_script_flag and download_script_flag.lower().strip() == "true":
# Use host, otherwise it cannot be reconstructed in tooling plugin
-
- file_name = 'setup_ssh_{}-{}'.format(host.lower().replace(".", "-"), port)
+
+ file_name = "setup_ssh_{}-{}".format(host.lower().replace(".", "-"), port)
SSH_JUMPHOST_TARGET = os.environ.get("SSH_JUMPHOST_TARGET", "")
if SSH_JUMPHOST_TARGET:
# add name if variabl is set
file_name += "-" + SSH_JUMPHOST_TARGET.lower().replace(".", "-")
file_name += ".sh"
- handler.set_header('Content-Type', 'application/octet-stream')
- handler.set_header('Content-Disposition', 'attachment; filename=' + file_name) # Hostname runtime
+ handler.set_header("Content-Type", "application/octet-stream")
+ handler.set_header(
+ "Content-Disposition", "attachment; filename=" + file_name
+ ) # Hostname runtime
handler.write(setup_script)
handler.finish()
else:
handler.finish(setup_script)
-
+
def parse_endpoint_origin(endpoint_url: str):
# get host and port from endpoint url
from urllib.parse import urlparse
+
endpoint_url = urlparse(endpoint_url)
hostname = endpoint_url.hostname
port = endpoint_url.port
@@ -542,33 +953,36 @@ def parse_endpoint_origin(endpoint_url: str):
if endpoint_url.scheme == "https":
port = 443
return hostname, str(port)
-
+
+
def generate_token(base_url: str):
private_ssh_key_path = HOME + "/.ssh/id_ed25519"
with open(private_ssh_key_path, "r") as f:
runtime_private_key = f.read()
import hashlib
+
key_hasher = hashlib.sha1()
key_hasher.update(str.encode(str(runtime_private_key).lower().strip()))
key_hash = key_hasher.hexdigest()
token_hasher = hashlib.sha1()
- token_str = (key_hash+base_url).lower().strip()
+ token_str = (key_hash + base_url).lower().strip()
token_hasher.update(str.encode(token_str))
return str(token_hasher.hexdigest())
+
def get_setup_script(hostname: str = None, port: str = None):
-
+
private_ssh_key_path = HOME + "/.ssh/id_ed25519"
with open(private_ssh_key_path, "r") as f:
runtime_private_key = f.read()
ssh_templates_path = os.path.dirname(os.path.abspath(__file__)) + "/setup_templates"
- with open(ssh_templates_path + '/client_command.txt', 'r') as file:
+ with open(ssh_templates_path + "/client_command.txt", "r") as file:
client_command = file.read()
-
+
SSH_JUMPHOST_TARGET = os.environ.get("SSH_JUMPHOST_TARGET", "")
is_runtime_manager_existing = False if SSH_JUMPHOST_TARGET == "" else True
@@ -579,37 +993,49 @@ def get_setup_script(hostname: str = None, port: str = None):
PORT_MANAGER = port
PORT_RUNTIME = os.getenv("WORKSPACE_PORT", "8080")
- RUNTIME_CONFIG_NAME = RUNTIME_CONFIG_NAME + "{}-{}-{}".format(HOSTNAME_RUNTIME, HOSTNAME_MANAGER, PORT_MANAGER)
-
- client_command = client_command \
- .replace("{HOSTNAME_MANAGER}", HOSTNAME_MANAGER) \
- .replace("{PORT_MANAGER}", str(PORT_MANAGER)) \
+ RUNTIME_CONFIG_NAME = RUNTIME_CONFIG_NAME + "{}-{}-{}".format(
+ HOSTNAME_RUNTIME, HOSTNAME_MANAGER, PORT_MANAGER
+ )
+
+ client_command = (
+ client_command.replace("{HOSTNAME_MANAGER}", HOSTNAME_MANAGER)
+ .replace("{PORT_MANAGER}", str(PORT_MANAGER))
.replace("#ProxyCommand", "ProxyCommand")
-
+ )
+
local_keyscan_replacement = "{}".format(HOSTNAME_RUNTIME)
else:
HOSTNAME_RUNTIME = hostname
PORT_RUNTIME = port
- RUNTIME_CONFIG_NAME = RUNTIME_CONFIG_NAME + "{}-{}".format(HOSTNAME_RUNTIME, PORT_RUNTIME)
+ RUNTIME_CONFIG_NAME = RUNTIME_CONFIG_NAME + "{}-{}".format(
+ HOSTNAME_RUNTIME, PORT_RUNTIME
+ )
- local_keyscan_replacement = "[{}]:{}".format(HOSTNAME_RUNTIME, PORT_RUNTIME)
+ local_keyscan_replacement = "[{}]:{}".format(HOSTNAME_RUNTIME, PORT_RUNTIME)
# perform keyscan with localhost to get the runtime's keyscan result.
# Replace then the "localhost" part in the returning string with the actual RUNTIME_HOST_NAME
local_keyscan_entry = get_ssh_keyscan_results("localhost")
if local_keyscan_entry is not None:
- local_keyscan_entry = local_keyscan_entry.replace("localhost", local_keyscan_replacement)
-
- output = client_command \
- .replace("{PRIVATE_KEY_RUNTIME}", runtime_private_key) \
- .replace("{HOSTNAME_RUNTIME}", HOSTNAME_RUNTIME) \
- .replace("{RUNTIME_KNOWN_HOST_ENTRY}", local_keyscan_entry) \
- .replace("{PORT_RUNTIME}", str(PORT_RUNTIME)) \
- .replace("{RUNTIME_CONFIG_NAME}", RUNTIME_CONFIG_NAME) \
- .replace("{RUNTIME_KEYSCAN_NAME}", local_keyscan_replacement.replace("[", "\[").replace("]", "\]"))
+ local_keyscan_entry = local_keyscan_entry.replace(
+ "localhost", local_keyscan_replacement
+ )
+
+ output = (
+ client_command.replace("{PRIVATE_KEY_RUNTIME}", runtime_private_key)
+ .replace("{HOSTNAME_RUNTIME}", HOSTNAME_RUNTIME)
+ .replace("{RUNTIME_KNOWN_HOST_ENTRY}", local_keyscan_entry)
+ .replace("{PORT_RUNTIME}", str(PORT_RUNTIME))
+ .replace("{RUNTIME_CONFIG_NAME}", RUNTIME_CONFIG_NAME)
+ .replace(
+ "{RUNTIME_KEYSCAN_NAME}",
+ local_keyscan_replacement.replace("[", "\[").replace("]", "\]"),
+ )
+ )
return output
+
def get_ssh_keyscan_results(host_name, host_port=22, key_format="ecdsa"):
"""
Perform the keyscan command to get the certicicate fingerprint (of specified format [e.g. rsa256, ecdsa, ...]) of the container.
@@ -624,7 +1050,8 @@ def get_ssh_keyscan_results(host_name, host_port=22, key_format="ecdsa"):
"""
keyscan_result = subprocess.run(
- ['ssh-keyscan', '-p', str(host_port), host_name], stdout=subprocess.PIPE)
+ ["ssh-keyscan", "-p", str(host_port), host_name], stdout=subprocess.PIPE
+ )
keys = keyscan_result.stdout.decode("utf-8").split("\n")
keyscan_entry = ""
for key in keys:
@@ -644,48 +1071,62 @@ def load_jupyter_server_extension(nb_server_app) -> None:
web_app = nb_server_app.web_app
log = nb_server_app.log
- host_pattern = '.*$'
+ host_pattern = ".*$"
# SharedSSHHandler
- route_pattern = url_path_join(web_app.settings['base_url'], '/tooling/ping')
+ route_pattern = url_path_join(web_app.settings["base_url"], "/tooling/ping")
web_app.add_handlers(host_pattern, [(route_pattern, PingHandler)])
- route_pattern = url_path_join(web_app.settings['base_url'], '/tooling/tools')
+ route_pattern = url_path_join(web_app.settings["base_url"], "/tooling/tools")
web_app.add_handlers(host_pattern, [(route_pattern, ToolingHandler)])
- route_pattern = url_path_join(web_app.settings['base_url'], '/tooling/tool-installers')
+ route_pattern = url_path_join(
+ web_app.settings["base_url"], "/tooling/tool-installers"
+ )
web_app.add_handlers(host_pattern, [(route_pattern, InstallToolHandler)])
- route_pattern = url_path_join(web_app.settings['base_url'], '/tooling/token')
+ route_pattern = url_path_join(web_app.settings["base_url"], "/tooling/token")
web_app.add_handlers(host_pattern, [(route_pattern, SharedTokenHandler)])
- route_pattern = url_path_join(web_app.settings['base_url'], '/tooling/git/info')
+ route_pattern = url_path_join(web_app.settings["base_url"], "/tooling/git/info")
web_app.add_handlers(host_pattern, [(route_pattern, GitInfoHandler)])
- route_pattern = url_path_join(web_app.settings['base_url'], '/tooling/git/commit')
+ route_pattern = url_path_join(web_app.settings["base_url"], "/tooling/git/commit")
web_app.add_handlers(host_pattern, [(route_pattern, GitCommitHandler)])
- route_pattern = url_path_join(web_app.settings['base_url'], '/tooling/ssh/setup-script')
+ web_app.add_handlers(
+ host_pattern,
+ [
+ (
+ url_path_join(web_app.settings["base_url"], "/tooling/storage/check"),
+ StorageCheckHandler,
+ )
+ ],
+ )
+
+ route_pattern = url_path_join(
+ web_app.settings["base_url"], "/tooling/ssh/setup-script"
+ )
web_app.add_handlers(host_pattern, [(route_pattern, SSHScriptHandler)])
- route_pattern = url_path_join(web_app.settings['base_url'], '/tooling/ssh/setup-command')
+ route_pattern = url_path_join(
+ web_app.settings["base_url"], "/tooling/ssh/setup-command"
+ )
web_app.add_handlers(host_pattern, [(route_pattern, SSHCommandHandler)])
- route_pattern = url_path_join(web_app.settings['base_url'], "/tooling/files/link")
+ route_pattern = url_path_join(web_app.settings["base_url"], "/tooling/files/link")
web_app.add_handlers(host_pattern, [(route_pattern, SharedFilesHandler)])
- route_pattern = url_path_join(web_app.settings['base_url'], SHARED_SSH_SETUP_PATH)
+ route_pattern = url_path_join(web_app.settings["base_url"], SHARED_SSH_SETUP_PATH)
web_app.add_handlers(host_pattern, [(route_pattern, SharedSSHHandler)])
- log.info('Extension jupyter-tooling-widget loaded successfully.')
+ log.info("Extension jupyter-tooling-widget loaded successfully.")
# Test routine. Can be invoked manually
if __name__ == "__main__":
- application = tornado.web.Application([
- (r'/test', HelloWorldHandler)
- ])
+ application = tornado.web.Application([(r"/test", HelloWorldHandler)])
application.listen(555)
tornado.ioloop.IOLoop.current().start()
From 5ffa7472bf1337988bab836205d25f91e0304759 Mon Sep 17 00:00:00 2001
From: Lukas Masuch
Date: Fri, 14 Feb 2020 04:13:00 +0100
Subject: [PATCH 014/293] Update readme
---
README.md | 20 +++++++++++++-------
1 file changed, 13 insertions(+), 7 deletions(-)
diff --git a/README.md b/README.md
index 10eadbb2..23e81216 100644
--- a/README.md
+++ b/README.md
@@ -254,7 +254,6 @@ In addition to the main workspace image (`mltooling/ml-workspace`), we provide o
-
@@ -273,7 +272,6 @@ docker run -p 8080:8080 mltooling/ml-workspace-minimal:latest
-
@@ -292,7 +290,6 @@ docker run -p 8080:8080 mltooling/ml-workspace-r:latest
-
@@ -312,7 +309,6 @@ docker run -p 8080:8080 mltooling/ml-workspace-spark:latest
-
@@ -852,7 +848,7 @@ Finally, use [docker build](https://docs.docker.com/engine/reference/commandline
-How to update a workspace container? (click to expand...)
+How to update a running workspace container? (click to expand...)
To update a running workspace instance to a more recent version, the running Docker container needs to be replaced with a new container based on the updated workspace image.
@@ -862,10 +858,20 @@ All data within the workspace that is not persisted to a mounted volume will be
Update Example (click to expand...)
-If the workspace is deployed via Docker (Kubernetes will have a different update process), you need to remove the existing container (via `docker rm`) and start a new one (via `docker run`) with the newer workspace image. Make sure to use the same configuration, volume, name, and port. For example, a workspace (image version `0.8.3`) was started with this command: `docker run -d -p 8080:8080 --name "ml-workspace" -v "/path/on/host:/workspace" --env AUTHENTICATE_VIA_JUPYTER="mytoken" --restart always mltooling/ml-workspace:0.8.3`) and needs to be updated to version `0.8.4`, you need to:
+If the workspace is deployed via Docker (Kubernetes will have a different update process), you need to remove the existing container (via `docker rm`) and start a new one (via `docker run`) with the newer workspace image. Make sure to use the same configuration, volume, name, and port. For example, a workspace (image version `0.8.7`) was started with this command:
+```
+docker run -d \
+ -p 8080:8080 \
+ --name "ml-workspace" \
+ -v "/path/on/host:/workspace" \
+ --env AUTHENTICATE_VIA_JUPYTER="mytoken" \
+ --restart always \
+ mltooling/ml-workspace:0.8.7
+```
+and needs to be updated to version `0.9.1`, you need to:
1. Stop and remove the running workspace container: `docker stop "ml-workspace" && docker rm "ml-workspace"`
-2. Start a new workspace container with the newer image and same configuration: `docker run -d -p 8080:8080 --name "ml-workspace" -v "/path/on/host:/workspace" --env AUTHENTICATE_VIA_JUPYTER="mytoken" --restart always mltooling/ml-workspace:latest`
+2. Start a new workspace container with the newer image and same configuration: `docker run -d -p 8080:8080 --name "ml-workspace" -v "/path/on/host:/workspace" --env AUTHENTICATE_VIA_JUPYTER="mytoken" --restart always mltooling/ml-workspace:0.9.1`
From 14bdfbcb8598f74d076f845f284077b5c65b8024 Mon Sep 17 00:00:00 2001
From: Lukas Masuch
Date: Fri, 14 Feb 2020 04:16:11 +0100
Subject: [PATCH 015/293] Update disk storage warning dialog
---
.../jupyter_tooling/tooling-shared-components.js | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/resources/jupyter/extensions/tooling-extension/jupyter_tooling/tooling-shared-components.js b/resources/jupyter/extensions/tooling-extension/jupyter_tooling/tooling-shared-components.js
index f85fdfe4..64bbe2bb 100644
--- a/resources/jupyter/extensions/tooling-extension/jupyter_tooling/tooling-shared-components.js
+++ b/resources/jupyter/extensions/tooling-extension/jupyter_tooling/tooling-shared-components.js
@@ -511,10 +511,10 @@ define(['base/js/namespace', 'jquery', 'base/js/dialog', 'require', 'exports', '
if (data["containerSizeWarning"]) {
warning_div += '
Size of your workspace container: ' + data["containerSize"] + ' GB / '+ data["containerSizeLimit"] + ' GB
';
- warning_div += '
You have exceeded the limit of available disk storage assigned to your workspace container. Usually, this includes everything stored outside of the /workspace folder (working directory). Your workspace container might be automatically reset, in case you do not clean up your container storage. This container reset will remove all files outside of the /workspace folder.
';
+ warning_div += '
You have exceeded the limit of available disk storage assigned to your workspace container. Usually, this includes everything stored outside of the /workspace folder (working directory). Your workspace container might be automatically reset if you do not free up storage space. This container reset will remove all files outside of the /workspace folder.
';
}
div.append('
'+warning_div+"
");
- div.append('
To find the largest files and directories, we recommend to use the terminal with the following command: ncdu / the Disk Usage Analyzer application accessible from Applications -> System within the VNC Desktop. Alternatively, you can also use
');
+ div.append('
To find the largest files and directories, we recommend to use the terminal with the following command: ncdu /. Alternatively, you can also use the Disk Usage Analyzer application accessible from Applications -> System within the VNC Desktop.
');
return div
}
From 23e146eaca5caa3ca3d42d28e17e1ff6f29c9549 Mon Sep 17 00:00:00 2001
From: Lukas Masuch
Date: Fri, 14 Feb 2020 19:25:09 +0100
Subject: [PATCH 016/293] Set default values for VNC environment variables
---
resources/scripts/start-vnc-server.sh | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/resources/scripts/start-vnc-server.sh b/resources/scripts/start-vnc-server.sh
index 9c338f39..ecb5830a 100644
--- a/resources/scripts/start-vnc-server.sh
+++ b/resources/scripts/start-vnc-server.sh
@@ -11,6 +11,11 @@
set -eu
+# Set default values for vnc settings if not provided
+VNC_PW=${VNC_PW:-"vncpassword"}
+VNC_RESOLUTION=${VNC_RESOLUTION:-"1600x900"}
+VNC_COL_DEPTH=${VNC_COL_DEPTH:-"24"}
+
mkdir -p $HOME/.vnc
touch $HOME/.vnc/passwd
From 37d2ced8509f6df6cc6d2eeb6dbc1c5ffecce3cb Mon Sep 17 00:00:00 2001
From: Lukas Masuch
Date: Fri, 14 Feb 2020 19:26:39 +0100
Subject: [PATCH 017/293] Prevent error if WORKSPACE_BASE_URL does not exist
---
resources/docker-entrypoint.py | 7 ++-----
1 file changed, 2 insertions(+), 5 deletions(-)
diff --git a/resources/docker-entrypoint.py b/resources/docker-entrypoint.py
index 01a650d6..fe4489fb 100644
--- a/resources/docker-entrypoint.py
+++ b/resources/docker-entrypoint.py
@@ -31,13 +31,10 @@ def set_env_variable(env_variable: str, value: str, ignore_if_set: bool = False)
# Manage base path dynamically
-ENV_JUPYTERHUB_SERVICE_PREFIX = os.getenv("JUPYTERHUB_SERVICE_PREFIX")
+ENV_JUPYTERHUB_SERVICE_PREFIX = os.getenv("JUPYTERHUB_SERVICE_PREFIX", None)
ENV_NAME_WORKSPACE_BASE_URL = "WORKSPACE_BASE_URL"
-base_url = os.environ[ENV_NAME_WORKSPACE_BASE_URL]
-
-if not base_url:
- base_url = ""
+base_url = os.getenv(ENV_NAME_WORKSPACE_BASE_URL, "")
if ENV_JUPYTERHUB_SERVICE_PREFIX:
# Installation with Jupyterhub
From f974742122d3eab02412516fe12d4ea8c7f85a73 Mon Sep 17 00:00:00 2001
From: Lukas Masuch
Date: Fri, 14 Feb 2020 19:27:00 +0100
Subject: [PATCH 018/293] Update readme
---
README.md | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/README.md b/README.md
index 23e81216..a09e8bc3 100644
--- a/README.md
+++ b/README.md
@@ -129,7 +129,7 @@ The workspace provides a variety of configuration options that can be used by se
INCLUDE_TUTORIALS
-
If true, a selection of tutorial and introduction notebooks are added to the /workspace folder at container startup, but only in if the folder is empty.
+
If true, a selection of tutorial and introduction notebooks are added to the /workspace folder at container startup, but only if the folder is empty.
true
@@ -636,7 +636,7 @@ Once the remote directory is mounted, you can interact with the remote file syst
### Remote Development
-The workspace can be integrated and used as a remote runtime (also known as remote kernel/machine/interpreter) for a variety of popular development tools and IDEs, such as Jupyter, VS Code, PyCharm, Colab, or Atom Hydrogen. Thereby, you can connect your favorite development tool running on your local machine to a remote machine for code execution. This enables a **local-quality development experience with remote-hosted compute resources**.
+The workspace can be integrated and used as a remote runtime (also known as remote kernel/machine/interpreter) for a variety of popular development tools and IDEs, such as Jupyter, VS Code, PyCharm, Colab, or Atom Hydrogen. Thereby, you can connect your favorite development tool running on your local machine to a remote machine for code execution. This enables a local-quality development experience with remote-hosted compute resources.
These integrations usually require a passwordless SSH connection from the local machine to the workspace. To set up an SSH connection, please follow the steps explained in the [SSH Access](#ssh-access) section.
From 8337f4dabe343234cfe08e7b61ca23a541dc985f Mon Sep 17 00:00:00 2001
From: Lukas Masuch
Date: Tue, 25 Feb 2020 19:19:10 +0100
Subject: [PATCH 019/293] Add multiprocessing issue to known problems
---
README.md | 42 ++++++++++++++++++++++++++++++++++++++++++
1 file changed, 42 insertions(+)
diff --git a/README.md b/README.md
index a09e8bc3..1935535e 100644
--- a/README.md
+++ b/README.md
@@ -936,6 +936,48 @@ docker run --shm-size=2G mltooling/ml-workspace:latest
+Multiprocessing code is unexpectedly slow (click to expand...)
+
+In general, the performance of running code within Docker is [nearly identical](https://stackoverflow.com/questions/21889053/what-is-the-runtime-performance-cost-of-a-docker-container) compared to running it directly on the machine. However, in case you have limited the container's CPU quota (as explained in [this section](#limit-memory--cpu)), the container can still see the full count of CPU cores available on the machine and there is no technical way to prevent this. Many libraries and tools will use the full CPU count (e.g., via `os.cpu_count()`) to set the number of threads used for multiprocessing/-threading. This might cause the program to start more threads/processes than it can efficiently handle with the available CPU quota, which can tremendously slow down the overall performance. Therefore, it is important to set the available CPU count or the maximum number of threads explicitly to the configured CPU quota. The workspace provides capabilities to detect the number of available CPUs automatically, which are used to configure a variety of common libraries via environment variables such as `OMP_NUM_THREADS` or `MKL_NUM_THREADS`. It is also possible to explicitly set the number of available CPUs at container startup via the `MAX_NUM_THREADS` environment variable (see [configuration section](https://github.com/ml-tooling/ml-workspace#configuration-options)). The same environment variable can also be used to get the number of available CPUs at runtime.
+
+Even though the automatic configuration capabilities of the workspace will fix a variety of inefficiencies, we still recommend configuring the number of available CPUs with all libraries explicitly. For example:
+
+```python
+import os
+MAX_NUM_THREADS = int(os.getenv("MAX_NUM_THREADS"))
+
+# Set in pytorch
+import torch
+torch.set_num_threads(MAX_NUM_THREADS)
+
+# Set in tensorflow
+import tensorflow as tf
+config = tf.ConfigProto(
+ device_count={"CPU": MAX_NUM_THREADS},
+ inter_op_parallelism_threads=MAX_NUM_THREADS,
+ intra_op_parallelism_threads=MAX_NUM_THREADS,
+)
+tf_session = tf.Session(config=config)
+
+# Set session for keras
+import keras.backend as K
+K.set_session(tf_session)
+
+# Set in sklearn estimator
+from sklearn.linear_model import LogisticRegression
+LogisticRegression(n_jobs=MAX_NUM_THREADS).fit(X, y)
+
+# Set for multiprocessing pool
+from multiprocessing import Pool
+
+with Pool(MAX_NUM_THREADS) as pool:
+ results = pool.map(lst)
+```
+
+
+
+
+
Nginx terminates with SIGILL core dumped error (click to expand...)
If you encounter the following error within the container logs when starting the workspace, it will most likely not be possible to run the workspace on your hardware:
From 2322caf29364fed8b632276f7801c1f0577f004b Mon Sep 17 00:00:00 2001
From: Benjamin Raethlein
Date: Wed, 28 Oct 2020 22:29:18 +0100
Subject: [PATCH 020/293] Don't install `nbinteract` as it uses an old version
of `nbconvert`, which prevents notebooks from being opened
---
resources/libraries/requirements-full.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/resources/libraries/requirements-full.txt b/resources/libraries/requirements-full.txt
index 6c2f41ed..34e6b597 100644
--- a/resources/libraries/requirements-full.txt
+++ b/resources/libraries/requirements-full.txt
@@ -231,7 +231,7 @@ jupyter-console==6.1.0
jupyter-kernel-gateway==2.4.0
jupyter-server-proxy==1.2.0
nbdev==0.2.9
-nbinteract==0.2.5
+#nbinteract==0.2.5
nbval==0.9.4
papermill==1.2.1
pivottablejs==0.9.0
From 07b39d1d94e41a9ea665837a891a75262af8863e Mon Sep 17 00:00:00 2001
From: Benjamin Raethlein
Date: Mon, 16 Nov 2020 16:57:42 +0100
Subject: [PATCH 021/293] Use universal_build lib for build.py
---
build.py | 180 +++++++++++++++++++-----------------------
gpu-flavor/build.py | 146 +++++++++++++++-------------------
r-flavor/build.py | 146 +++++++++++++++-------------------
spark-flavor/build.py | 146 +++++++++++++++-------------------
4 files changed, 269 insertions(+), 349 deletions(-)
diff --git a/build.py b/build.py
index 42347562..4e4de8d7 100644
--- a/build.py
+++ b/build.py
@@ -1,121 +1,107 @@
-import os, sys
import subprocess
import argparse
import datetime
-parser = argparse.ArgumentParser()
-parser.add_argument('--name', help='name of docker container', default="ml-workspace")
-parser.add_argument('--version', help='version tag of docker container', default="latest")
-parser.add_argument('--deploy', help='deploy docker container to remote', action='store_true')
-parser.add_argument('--flavor', help='flavor (full, light, minimal) used for docker container', default='full')
+from universal_build import build_utils
+
+parser = argparse.ArgumentParser(add_help=False)
+parser.add_argument(
+ "--flavor",
+ help="flavor (full, light, minimal) used for docker container",
+ default="full",
+)
REMOTE_IMAGE_PREFIX = "mltooling/"
+COMPONENT_NAME = "ml-workspace"
+FLAG_FLAVOR = "flavor"
+
+args = build_utils.get_sanitized_arguments(argument_parser=parser)
+
+if not args[FLAG_FLAVOR]:
+ args[FLAG_FLAVOR] = "full"
+
+args[FLAG_FLAVOR] = str(args[FLAG_FLAVOR]).lower()
+
+if args[FLAG_FLAVOR] == "all":
+ args[FLAG_FLAVOR] = "full"
+ build_utils.build(".", args)
+
+ args[FLAG_FLAVOR] = "light"
+ build_utils.build(".", args)
+
+ args[FLAG_FLAVOR] = "minimal"
+ build_utils.build(".", args)
+
+ args[FLAG_FLAVOR] = "r"
+ build_utils.build(".", args)
+
+ args[FLAG_FLAVOR] = "spark"
+ build_utils.build(".", args)
-args, unknown = parser.parse_known_args()
-if unknown:
- print("Unknown arguments "+str(unknown))
-
-# Wrapper to print out command
-def call(command):
- print("Executing: "+command)
- return subprocess.call(command, shell=True)
-
-# calls build scripts in every module with same flags
-def build(module="."):
-
- if not os.path.isdir(module):
- print("Could not find directory for " + module)
- sys.exit(1)
-
- build_command = "python build.py"
-
- if args.version:
- build_command += " --version=" + str(args.version)
-
- if args.deploy:
- build_command += " --deploy"
-
- if args.flavor:
- build_command += " --flavor=" + str(args.flavor)
-
- working_dir = os.path.dirname(os.path.realpath(__file__))
- full_command = "cd " + module + " && " + build_command + " && cd " + working_dir
- print("Building " + module + " with: " + full_command)
- failed = call(full_command)
- if failed:
- print("Failed to build module " + module)
- sys.exit(1)
-
-if not args.flavor:
- args.flavor = "full"
-
-args.flavor = str(args.flavor).lower()
-
-if args.flavor == "all":
- args.flavor = "full"
- build()
- args.flavor = "light"
- build()
- args.flavor = "minimal"
- build()
- args.flavor = "r"
- build()
- args.flavor = "spark"
- build()
- args.flavor = "gpu"
- build()
- sys.exit(0)
+ args[FLAG_FLAVOR] = "gpu"
+ build_utils.build(".", args)
+
+ build_utils.exit_process(0)
# unknown flavor -> try to build from subdirectory
-if args.flavor not in ["full", "minimal", "light"]:
+if args[FLAG_FLAVOR] not in ["full", "minimal", "light"]:
# assume that flavor has its own directory with build.py
- build(args.flavor + "-flavor")
- sys.exit(0)
-
-service_name = os.path.basename(os.path.dirname(os.path.realpath(__file__)))
-if args.name:
- service_name = args.name
+ build_utils.build(args[FLAG_FLAVOR], args)
+ build_utils.exit_process(0)
+service_name = COMPONENT_NAME
# Build full image without suffix if the flavor is not minimal or light
-if args.flavor in ["minimal", "light"]:
- service_name += "-" + args.flavor
+if args[FLAG_FLAVOR] in ["minimal", "light"]:
+ service_name += "-" + args[FLAG_FLAVOR]
# docker build
git_rev = "unknown"
try:
- git_rev = subprocess.check_output(["git", "rev-parse", "--short", "HEAD"]).decode('ascii').strip()
-except:
+ git_rev = (
+ subprocess.check_output(["git", "rev-parse", "--short", "HEAD"])
+ .decode("ascii")
+ .strip()
+ )
+except Exception:
pass
build_date = datetime.datetime.utcnow().isoformat("T") + "Z"
try:
- build_date = subprocess.check_output(['date', '-u', '+%Y-%m-%dT%H:%M:%SZ']).decode('ascii').strip()
-except:
+ build_date = (
+ subprocess.check_output(["date", "-u", "+%Y-%m-%dT%H:%M:%SZ"])
+ .decode("ascii")
+ .strip()
+ )
+except Exception:
pass
vcs_ref_build_arg = " --build-arg ARG_VCS_REF=" + str(git_rev)
build_date_build_arg = " --build-arg ARG_BUILD_DATE=" + str(build_date)
-flavor_build_arg = " --build-arg ARG_WORKSPACE_FLAVOR=" + str(args.flavor)
-version_build_arg = " --build-arg ARG_WORKSPACE_VERSION=" + str(args.version)
-
-versioned_image = service_name+":"+str(args.version)
-latest_image = service_name+":latest"
-failed = call("docker build -t "+ versioned_image + " -t " + latest_image + " "
- + version_build_arg + " " + flavor_build_arg+ " " + vcs_ref_build_arg + " " + build_date_build_arg + " ./")
-
-if failed:
- print("Failed to build container")
- sys.exit(1)
-
-remote_versioned_image = REMOTE_IMAGE_PREFIX + versioned_image
-call("docker tag " + versioned_image + " " + remote_versioned_image)
-
-remote_latest_image = REMOTE_IMAGE_PREFIX + latest_image
-call("docker tag " + latest_image + " " + remote_latest_image)
-
-if args.deploy:
- call("docker push " + remote_versioned_image)
-
- if "SNAPSHOT" not in args.version:
- # do not push SNAPSHOT builds as latest version
- call("docker push " + remote_latest_image)
+flavor_build_arg = " --build-arg ARG_WORKSPACE_FLAVOR=" + str(args[FLAG_FLAVOR])
+version_build_arg = " --build-arg ARG_WORKSPACE_VERSION=" + str(
+ args[build_utils.FLAG_VERSION]
+)
+
+if args[build_utils.FLAG_MAKE]:
+ build_args = (
+ version_build_arg
+ + " "
+ + flavor_build_arg
+ + " "
+ + vcs_ref_build_arg
+ + " "
+ + build_date_build_arg
+ )
+
+ completed_process = build_utils.build_docker_image(
+ COMPONENT_NAME, version=args[build_utils.FLAG_VERSION], build_args=build_args
+ )
+ if completed_process.returncode > 0:
+ build_utils.exit_process(1)
+
+if args[build_utils.FLAG_RELEASE]:
+ build_utils.release_docker_image(
+ COMPONENT_NAME,
+ args[build_utils.FLAG_VERSION],
+ args[build_utils.FLAG_DOCKER_IMAGE_PREFIX],
+ )
diff --git a/gpu-flavor/build.py b/gpu-flavor/build.py
index 054a6745..f1240a07 100644
--- a/gpu-flavor/build.py
+++ b/gpu-flavor/build.py
@@ -1,110 +1,88 @@
-import os, sys
import subprocess
import argparse
import datetime
-parser = argparse.ArgumentParser()
-parser.add_argument('--name', help='base name of docker container', default="ml-workspace")
-parser.add_argument('--version', help='version tag of docker container', default="latest")
-parser.add_argument('--deploy', help='deploy docker container to remote', action='store_true')
-parser.add_argument('--flavor', help='flavor (gpu) used for docker container', default='gpu')
+from universal_build import build_utils
-REMOTE_IMAGE_PREFIX = "mltooling/"
-
-args, unknown = parser.parse_known_args()
-if unknown:
- print("Unknown arguments "+str(unknown))
-
-# Wrapper to print out command
-def call(command):
- print("Executing: "+command)
- return subprocess.call(command, shell=True)
-
-# calls build scripts in every module with same flags
-def build(module="."):
-
- if not os.path.isdir(module):
- print("Could not find directory for " + module)
- sys.exit(1)
-
- build_command = "python build.py"
+parser = argparse.ArgumentParser(add_help=False)
+parser.add_argument(
+ "--flavor",
+ help="flavor (full, light, minimal) used for docker container",
+ default="gpu",
+)
- if args.version:
- build_command += " --version=" + str(args.version)
-
- if args.deploy:
- build_command += " --deploy"
-
- if args.flavor:
- build_command += " --flavor=" + str(args.flavor)
+REMOTE_IMAGE_PREFIX = "mltooling/"
+FLAG_FLAVOR = "flavor"
+COMPONENT_NAME = "gpu-flavor"
- working_dir = os.path.dirname(os.path.realpath(__file__))
- full_command = "cd " + module + " && " + build_command + " && cd " + working_dir
- print("Building " + module + " with: " + full_command)
- failed = call(full_command)
- if failed:
- print("Failed to build module " + module)
- sys.exit(1)
+args = build_utils.get_sanitized_arguments(argument_parser=parser)
-if not args.flavor:
- args.flavor = "gpu"
+if not args[FLAG_FLAVOR]:
+ args[FLAG_FLAVOR] = "gpu"
-args.flavor = str(args.flavor).lower()
+args[FLAG_FLAVOR] = str(args[FLAG_FLAVOR]).lower()
-if args.flavor == "all":
- args.flavor = "gpu"
- build()
- sys.exit(0)
+if args[FLAG_FLAVOR] == "all":
+ args[FLAG_FLAVOR] = "gpu"
+ build_utils.build(".", args)
+ build_utils.exit_process(0)
# unknown flavor -> try to build from subdirectory
-if args.flavor not in ["gpu"]:
+if args[FLAG_FLAVOR] not in ["gpu"]:
# assume that flavor has its own directory with build.py
- build(args.flavor + "-flavor")
- sys.exit(0)
-
-service_name = os.path.basename(os.path.dirname(os.path.realpath(__file__)))
-if args.name:
- service_name = args.name
+ build_utils.build(args[FLAG_FLAVOR] + "-flavor", args)
+ build_utils.exit_process(0)
-# add flavor to service name
-service_name += "-" + args.flavor
+service_name = COMPONENT_NAME + "-" + args[FLAG_FLAVOR]
# docker build
git_rev = "unknown"
try:
- git_rev = subprocess.check_output(["git", "rev-parse", "--short", "HEAD"]).decode('ascii').strip()
-except:
+ git_rev = (
+ subprocess.check_output(["git", "rev-parse", "--short", "HEAD"])
+ .decode("ascii")
+ .strip()
+ )
+except Exception:
pass
build_date = datetime.datetime.utcnow().isoformat("T") + "Z"
try:
- build_date = subprocess.check_output(['date', '-u', '+%Y-%m-%dT%H:%M:%SZ']).decode('ascii').strip()
-except:
+ build_date = (
+ subprocess.check_output(["date", "-u", "+%Y-%m-%dT%H:%M:%SZ"])
+ .decode("ascii")
+ .strip()
+ )
+except Exception:
pass
vcs_ref_build_arg = " --build-arg ARG_VCS_REF=" + str(git_rev)
build_date_build_arg = " --build-arg ARG_BUILD_DATE=" + str(build_date)
-flavor_build_arg = " --build-arg ARG_WORKSPACE_FLAVOR=" + str(args.flavor)
-version_build_arg = " --build-arg ARG_WORKSPACE_VERSION=" + str(args.version)
-
-versioned_image = service_name+":"+str(args.version)
-latest_image = service_name+":latest"
-failed = call("docker build -t "+ versioned_image + " -t " + latest_image + " "
- + version_build_arg + " " + flavor_build_arg+ " " + vcs_ref_build_arg + " " + build_date_build_arg + " ./")
-
-if failed:
- print("Failed to build container")
- sys.exit(1)
-
-remote_versioned_image = REMOTE_IMAGE_PREFIX + versioned_image
-call("docker tag " + versioned_image + " " + remote_versioned_image)
-
-remote_latest_image = REMOTE_IMAGE_PREFIX + latest_image
-call("docker tag " + latest_image + " " + remote_latest_image)
-
-if args.deploy:
- call("docker push " + remote_versioned_image)
-
- if "SNAPSHOT" not in args.version:
- # do not push SNAPSHOT builds as latest version
- call("docker push " + remote_latest_image)
+flavor_build_arg = " --build-arg ARG_WORKSPACE_FLAVOR=" + str(args[FLAG_FLAVOR])
+version_build_arg = " --build-arg ARG_WORKSPACE_VERSION=" + str(
+ args[build_utils.FLAG_VERSION]
+)
+
+if args[build_utils.FLAG_MAKE]:
+ build_args = (
+ version_build_arg
+ + " "
+ + flavor_build_arg
+ + " "
+ + vcs_ref_build_arg
+ + " "
+ + build_date_build_arg
+ )
+
+ completed_process = build_utils.build_docker_image(
+ COMPONENT_NAME, version=args[build_utils.FLAG_VERSION], build_args=build_args
+ )
+ if completed_process.returncode > 0:
+ build_utils.exit_process(1)
+
+if args[build_utils.FLAG_RELEASE]:
+ build_utils.release_docker_image(
+ COMPONENT_NAME,
+ args[build_utils.FLAG_VERSION],
+ args[build_utils.FLAG_DOCKER_IMAGE_PREFIX],
+ )
diff --git a/r-flavor/build.py b/r-flavor/build.py
index 94324941..ab247635 100644
--- a/r-flavor/build.py
+++ b/r-flavor/build.py
@@ -1,110 +1,88 @@
-import os, sys
import subprocess
import argparse
import datetime
-parser = argparse.ArgumentParser()
-parser.add_argument('--name', help='base name of docker container', default="ml-workspace")
-parser.add_argument('--version', help='version tag of docker container', default="latest")
-parser.add_argument('--deploy', help='deploy docker container to remote', action='store_true')
-parser.add_argument('--flavor', help='flavor (r) used for docker container', default='r')
+from universal_build import build_utils
-REMOTE_IMAGE_PREFIX = "mltooling/"
-
-args, unknown = parser.parse_known_args()
-if unknown:
- print("Unknown arguments "+str(unknown))
-
-# Wrapper to print out command
-def call(command):
- print("Executing: "+command)
- return subprocess.call(command, shell=True)
-
-# calls build scripts in every module with same flags
-def build(module="."):
-
- if not os.path.isdir(module):
- print("Could not find directory for " + module)
- sys.exit(1)
-
- build_command = "python build.py"
+parser = argparse.ArgumentParser(add_help=False)
+parser.add_argument(
+ "--flavor",
+ help="flavor (full, light, minimal) used for docker container",
+ default="r",
+)
- if args.version:
- build_command += " --version=" + str(args.version)
-
- if args.deploy:
- build_command += " --deploy"
-
- if args.flavor:
- build_command += " --flavor=" + str(args.flavor)
+REMOTE_IMAGE_PREFIX = "mltooling/"
+FLAG_FLAVOR = "flavor"
+COMPONENT_NAME = "r-flavor"
- working_dir = os.path.dirname(os.path.realpath(__file__))
- full_command = "cd " + module + " && " + build_command + " && cd " + working_dir
- print("Building " + module + " with: " + full_command)
- failed = call(full_command)
- if failed:
- print("Failed to build module " + module)
- sys.exit(1)
+args = build_utils.get_sanitized_arguments(argument_parser=parser)
-if not args.flavor:
- args.flavor = "r"
+if not args[FLAG_FLAVOR]:
+ args[FLAG_FLAVOR] = "r"
-args.flavor = str(args.flavor).lower()
+args[FLAG_FLAVOR] = str(args[FLAG_FLAVOR]).lower()
-if args.flavor == "all":
- args.flavor = "r"
- build()
- sys.exit(0)
+if args[FLAG_FLAVOR] == "all":
+ args[FLAG_FLAVOR] = "r"
+ build_utils.build(".", args)
+ build_utils.exit_process(0)
# unknown flavor -> try to build from subdirectory
-if args.flavor not in ["r"]:
+if args[FLAG_FLAVOR] not in ["r"]:
# assume that flavor has its own directory with build.py
- build(args.flavor + "-flavor")
- sys.exit(0)
-
-service_name = os.path.basename(os.path.dirname(os.path.realpath(__file__)))
-if args.name:
- service_name = args.name
+ build_utils.build(args[FLAG_FLAVOR] + "-flavor", args)
+ build_utils.exit_process(0)
-# add flavor to service name
-service_name += "-" + args.flavor
+service_name = COMPONENT_NAME + "-" + args[FLAG_FLAVOR]
# docker build
git_rev = "unknown"
try:
- git_rev = subprocess.check_output(["git", "rev-parse", "--short", "HEAD"]).decode('ascii').strip()
-except:
+ git_rev = (
+ subprocess.check_output(["git", "rev-parse", "--short", "HEAD"])
+ .decode("ascii")
+ .strip()
+ )
+except Exception:
pass
build_date = datetime.datetime.utcnow().isoformat("T") + "Z"
try:
- build_date = subprocess.check_output(['date', '-u', '+%Y-%m-%dT%H:%M:%SZ']).decode('ascii').strip()
-except:
+ build_date = (
+ subprocess.check_output(["date", "-u", "+%Y-%m-%dT%H:%M:%SZ"])
+ .decode("ascii")
+ .strip()
+ )
+except Exception:
pass
vcs_ref_build_arg = " --build-arg ARG_VCS_REF=" + str(git_rev)
build_date_build_arg = " --build-arg ARG_BUILD_DATE=" + str(build_date)
-flavor_build_arg = " --build-arg ARG_WORKSPACE_FLAVOR=" + str(args.flavor)
-version_build_arg = " --build-arg ARG_WORKSPACE_VERSION=" + str(args.version)
-
-versioned_image = service_name+":"+str(args.version)
-latest_image = service_name+":latest"
-failed = call("docker build -t "+ versioned_image + " -t " + latest_image + " "
- + version_build_arg + " " + flavor_build_arg+ " " + vcs_ref_build_arg + " " + build_date_build_arg + " ./")
-
-if failed:
- print("Failed to build container")
- sys.exit(1)
-
-remote_versioned_image = REMOTE_IMAGE_PREFIX + versioned_image
-call("docker tag " + versioned_image + " " + remote_versioned_image)
-
-remote_latest_image = REMOTE_IMAGE_PREFIX + latest_image
-call("docker tag " + latest_image + " " + remote_latest_image)
-
-if args.deploy:
- call("docker push " + remote_versioned_image)
-
- if "SNAPSHOT" not in args.version:
- # do not push SNAPSHOT builds as latest version
- call("docker push " + remote_latest_image)
+flavor_build_arg = " --build-arg ARG_WORKSPACE_FLAVOR=" + str(args[FLAG_FLAVOR])
+version_build_arg = " --build-arg ARG_WORKSPACE_VERSION=" + str(
+ args[build_utils.FLAG_VERSION]
+)
+
+if args[build_utils.FLAG_MAKE]:
+ build_args = (
+ version_build_arg
+ + " "
+ + flavor_build_arg
+ + " "
+ + vcs_ref_build_arg
+ + " "
+ + build_date_build_arg
+ )
+
+ completed_process = build_utils.build_docker_image(
+ COMPONENT_NAME, version=args[build_utils.FLAG_VERSION], build_args=build_args
+ )
+ if completed_process.returncode > 0:
+ build_utils.exit_process(1)
+
+if args[build_utils.FLAG_RELEASE]:
+ build_utils.release_docker_image(
+ COMPONENT_NAME,
+ args[build_utils.FLAG_VERSION],
+ args[build_utils.FLAG_DOCKER_IMAGE_PREFIX],
+ )
diff --git a/spark-flavor/build.py b/spark-flavor/build.py
index 17360ffd..0bce0844 100644
--- a/spark-flavor/build.py
+++ b/spark-flavor/build.py
@@ -1,110 +1,88 @@
-import os, sys
import subprocess
import argparse
import datetime
-parser = argparse.ArgumentParser()
-parser.add_argument('--name', help='base name of docker container', default="ml-workspace")
-parser.add_argument('--version', help='version tag of docker container', default="latest")
-parser.add_argument('--deploy', help='deploy docker container to remote', action='store_true')
-parser.add_argument('--flavor', help='flavor (spark) used for docker container', default='spark')
+from universal_build import build_utils
-REMOTE_IMAGE_PREFIX = "mltooling/"
-
-args, unknown = parser.parse_known_args()
-if unknown:
- print("Unknown arguments "+str(unknown))
-
-# Wrapper to print out command
-def call(command):
- print("Executing: "+command)
- return subprocess.call(command, shell=True)
-
-# calls build scripts in every module with same flags
-def build(module="."):
-
- if not os.path.isdir(module):
- print("Could not find directory for " + module)
- sys.exit(1)
-
- build_command = "python build.py"
+parser = argparse.ArgumentParser(add_help=False)
+parser.add_argument(
+ "--flavor",
+ help="flavor (full, light, minimal) used for docker container",
+ default="spark",
+)
- if args.version:
- build_command += " --version=" + str(args.version)
-
- if args.deploy:
- build_command += " --deploy"
-
- if args.flavor:
- build_command += " --flavor=" + str(args.flavor)
+REMOTE_IMAGE_PREFIX = "mltooling/"
+FLAG_FLAVOR = "flavor"
+COMPONENT_NAME = "spark-flavor"
- working_dir = os.path.dirname(os.path.realpath(__file__))
- full_command = "cd " + module + " && " + build_command + " && cd " + working_dir
- print("Building " + module + " with: " + full_command)
- failed = call(full_command)
- if failed:
- print("Failed to build module " + module)
- sys.exit(1)
+args = build_utils.get_sanitized_arguments(argument_parser=parser)
-if not args.flavor:
- args.flavor = "spark"
+if not args[FLAG_FLAVOR]:
+ args[FLAG_FLAVOR] = "spark"
-args.flavor = str(args.flavor).lower()
+args[FLAG_FLAVOR] = str(args[FLAG_FLAVOR]).lower()
-if args.flavor == "all":
- args.flavor = "spark"
- build()
- sys.exit(0)
+if args[FLAG_FLAVOR] == "all":
+ args[FLAG_FLAVOR] = "spark"
+ build_utils.build(".", args)
+ build_utils.exit_process(0)
# unknown flavor -> try to build from subdirectory
-if args.flavor not in ["spark"]:
+if args[FLAG_FLAVOR] not in ["spark"]:
# assume that flavor has its own directory with build.py
- build(args.flavor + "-flavor")
- sys.exit(0)
-
-service_name = os.path.basename(os.path.dirname(os.path.realpath(__file__)))
-if args.name:
- service_name = args.name
+ build_utils.build(args[FLAG_FLAVOR] + "-flavor", args)
+ build_utils.exit_process(0)
-# add flavor to service name
-service_name += "-" + args.flavor
+service_name = COMPONENT_NAME + "-" + args[FLAG_FLAVOR]
# docker build
git_rev = "unknown"
try:
- git_rev = subprocess.check_output(["git", "rev-parse", "--short", "HEAD"]).decode('ascii').strip()
-except:
+ git_rev = (
+ subprocess.check_output(["git", "rev-parse", "--short", "HEAD"])
+ .decode("ascii")
+ .strip()
+ )
+except Exception:
pass
build_date = datetime.datetime.utcnow().isoformat("T") + "Z"
try:
- build_date = subprocess.check_output(['date', '-u', '+%Y-%m-%dT%H:%M:%SZ']).decode('ascii').strip()
-except:
+ build_date = (
+ subprocess.check_output(["date", "-u", "+%Y-%m-%dT%H:%M:%SZ"])
+ .decode("ascii")
+ .strip()
+ )
+except Exception:
pass
vcs_ref_build_arg = " --build-arg ARG_VCS_REF=" + str(git_rev)
build_date_build_arg = " --build-arg ARG_BUILD_DATE=" + str(build_date)
-flavor_build_arg = " --build-arg ARG_WORKSPACE_FLAVOR=" + str(args.flavor)
-version_build_arg = " --build-arg ARG_WORKSPACE_VERSION=" + str(args.version)
-
-versioned_image = service_name+":"+str(args.version)
-latest_image = service_name+":latest"
-failed = call("docker build -t "+ versioned_image + " -t " + latest_image + " "
- + version_build_arg + " " + flavor_build_arg+ " " + vcs_ref_build_arg + " " + build_date_build_arg + " ./")
-
-if failed:
- print("Failed to build container")
- sys.exit(1)
-
-remote_versioned_image = REMOTE_IMAGE_PREFIX + versioned_image
-call("docker tag " + versioned_image + " " + remote_versioned_image)
-
-remote_latest_image = REMOTE_IMAGE_PREFIX + latest_image
-call("docker tag " + latest_image + " " + remote_latest_image)
-
-if args.deploy:
- call("docker push " + remote_versioned_image)
-
- if "SNAPSHOT" not in args.version:
- # do not push SNAPSHOT builds as latest version
- call("docker push " + remote_latest_image)
+flavor_build_arg = " --build-arg ARG_WORKSPACE_FLAVOR=" + str(args[FLAG_FLAVOR])
+version_build_arg = " --build-arg ARG_WORKSPACE_VERSION=" + str(
+ args[build_utils.FLAG_VERSION]
+)
+
+if args[build_utils.FLAG_MAKE]:
+ build_args = (
+ version_build_arg
+ + " "
+ + flavor_build_arg
+ + " "
+ + vcs_ref_build_arg
+ + " "
+ + build_date_build_arg
+ )
+
+ completed_process = build_utils.build_docker_image(
+ COMPONENT_NAME, version=args[build_utils.FLAG_VERSION], build_args=build_args
+ )
+ if completed_process.returncode > 0:
+ build_utils.exit_process(1)
+
+if args[build_utils.FLAG_RELEASE]:
+ build_utils.release_docker_image(
+ COMPONENT_NAME,
+ args[build_utils.FLAG_VERSION],
+ args[build_utils.FLAG_DOCKER_IMAGE_PREFIX],
+ )
From 272cb0718599d7016483ee272cbd53e07500b883 Mon Sep 17 00:00:00 2001
From: Benjamin Raethlein
Date: Mon, 16 Nov 2020 17:41:47 +0100
Subject: [PATCH 022/293] Add .editorconfig file
---
.editorconfig | 29 +++++++++++++++++++++++++++++
1 file changed, 29 insertions(+)
create mode 100644 .editorconfig
diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 00000000..6a94a62f
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,29 @@
+# http://editorconfig.org
+
+root = true
+
+[*]
+charset = utf-8
+end_of_line = lf
+indent_style = space
+trim_trailing_whitespace = true
+insert_final_newline = true
+
+[*.md]
+max_line_length = off
+trim_trailing_whitespace = false
+
+[*.py]
+indent_size = 4
+
+[*.js]
+indent_size = 2
+
+[*.jsx]
+indent_size = 2
+
+[*.json]
+indent_size = 2
+
+[*.java]
+indent_size = 2
From 92c8f1c8b37c8859f260299fd16a1ca2e099057e Mon Sep 17 00:00:00 2001
From: Benjamin Raethlein
Date: Mon, 16 Nov 2020 17:42:09 +0100
Subject: [PATCH 023/293] Add GitHub Actions Workflow to build the workspace
---
.github/actions/build-environment/Dockerfile | 79 +++++++++++++++++++
.github/actions/build-environment/action.yml | 29 +++++++
.../actions/build-environment/entrypoint.sh | 45 +++++++++++
.github/workflows/build-pipeline.yml | 38 +++++++++
4 files changed, 191 insertions(+)
create mode 100644 .github/actions/build-environment/Dockerfile
create mode 100644 .github/actions/build-environment/action.yml
create mode 100644 .github/actions/build-environment/entrypoint.sh
create mode 100644 .github/workflows/build-pipeline.yml
diff --git a/.github/actions/build-environment/Dockerfile b/.github/actions/build-environment/Dockerfile
new file mode 100644
index 00000000..3f84a45e
--- /dev/null
+++ b/.github/actions/build-environment/Dockerfile
@@ -0,0 +1,79 @@
+FROM ubuntu:20.04
+
+# Fix for pipe operations: https://github.com/hadolint/hadolint/wiki/DL4006
+SHELL ["/bin/bash", "-o", "pipefail", "-c"]
+
+# Generate and Set locals
+# https://stackoverflow.com/questions/28405902/how-to-set-the-locale-inside-a-debian-ubuntu-docker-container#38553499
+RUN apt-get update \
+ && apt-get install -y --no-install-recommends locales \
+ && sed -i -e 's/# en_US.UTF-8 UTF-8/en_US.UTF-8 UTF-8/' /etc/locale.gen \
+ && locale-gen \
+ && dpkg-reconfigure --frontend=noninteractive locales \
+ && update-locale LANG=en_US.UTF-8 \
+ # Clean up
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+
+ENV LC_ALL="en_US.UTF-8" \
+ LANG="en_US.UTF-8" \
+ LANGUAGE="en_US:en" \
+ TZ="Europe/Berlin" \
+ DEBIAN_FRONTEND="noninteractive"
+
+# Install basics
+# hadolint ignore=DL3005
+RUN apt-get update --fix-missing \
+ && apt-get install -y --no-install-recommends apt-utils \
+ && apt-get -y upgrade \
+ && apt-get update \
+ && apt-get install -y --no-install-recommends \
+ apt-transport-https \
+ ca-certificates \
+ curl \
+ wget \
+ gnupg2 \
+ git \
+ jq \
+ software-properties-common \
+ # Clean up
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+
+# Add tini
+RUN wget --quiet https://github.com/krallin/tini/releases/download/v0.19.0/tini -O /tini && \
+ chmod +x /tini
+
+# Install Docker in Container
+RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add - \
+ && add-apt-repository \
+ "deb [arch=amd64] https://download.docker.com/linux/ubuntu \
+ $(lsb_release -cs) \
+ stable" \
+ && apt-get update -y \
+ && apt-get install -y --no-install-recommends docker-ce=5:19.03.13~3-0~ubuntu-focal \
+ # Clean up
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+
+# Install Python
+RUN apt-get update \
+ && apt-get install -y --no-install-recommends \
+ python3 \
+ python3-pip \
+ && ln -s /usr/bin/python3 /usr/bin/python \
+ && ln -s /usr/bin/pip3 /usr/bin/pip \
+ # Clean up
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+#coverage tox pytest-runner
+
+# Install Universal-Build
+# hadolint ignore=DL3013
+RUN pip install --upgrade git+https://github.com/ml-tooling/universal-build.git
+
+COPY entrypoint.sh /entrypoint.sh
+
+RUN chmod +x /entrypoint.sh
+
+ENTRYPOINT ["/tini", "-g", "--", "/entrypoint.sh"]
diff --git a/.github/actions/build-environment/action.yml b/.github/actions/build-environment/action.yml
new file mode 100644
index 00000000..4aaa3417
--- /dev/null
+++ b/.github/actions/build-environment/action.yml
@@ -0,0 +1,29 @@
+name: "build-environment"
+description: "Environment to run build, test, and release steps."
+author: "ML Tooling "
+inputs:
+ build_args:
+ description: "Build arguments passed to the build.py scripts."
+ required: false
+ working_directory:
+ description: "Working directory from where the build command is run."
+ required: false
+ pypi_token:
+ description: "Personal access token for PyPI account."
+ required: false
+ pypi_repository:
+ description: "PyPI repository for publishing artifacts."
+ required: false
+ container_registry_url:
+ description: "URL used for container registry login."
+ required: false
+ container_registry_username:
+ description: "Username used for container registry login."
+ required: false
+ container_registry_password:
+ description: "Password used for container registry login."
+ required: false
+
+runs:
+ using: "docker"
+ image: "Dockerfile"
diff --git a/.github/actions/build-environment/entrypoint.sh b/.github/actions/build-environment/entrypoint.sh
new file mode 100644
index 00000000..a8eecf32
--- /dev/null
+++ b/.github/actions/build-environment/entrypoint.sh
@@ -0,0 +1,45 @@
+#!/bin/bash
+
+# Stops script execution if a command has an error
+set -e
+
+# set default build args if not provided
+export SERVICE_HOST="$_HOST_IP"
+if [ -z "$INPUT_BUILD_ARGS" ]; then
+ INPUT_BUILD_ARGS="--check --make --test"
+fi
+
+BUILD_SECRETS=""
+
+if [ -n "$GITHUB_TOKEN" ]; then
+ # Use the github token to authenticate the git interaction (see this Stackoverflow answer: https://stackoverflow.com/a/57229018/5379273)
+ git config --global url."https://api:$GITHUB_TOKEN@github.com/".insteadOf "https://github.com/"
+ git config --global url."https://ssh:$GITHUB_TOKEN@github.com/".insteadOf "ssh://git@github.com/"
+ git config --global url."https://git:$GITHUB_TOKEN@github.com/".insteadOf "git@github.com:"
+
+ BUILD_SECRETS="$BUILD_SECRETS --github-token=$GITHUB_TOKEN"
+fi
+
+if [ -n "$INPUT_CONTAINER_REGISTRY_USERNAME" ] && [ -n "$INPUT_CONTAINER_REGISTRY_PASSWORD" ]; then
+ docker login $INPUT_CONTAINER_REGISTRY_URL -u "$INPUT_CONTAINER_REGISTRY_USERNAME" -p "$INPUT_CONTAINER_REGISTRY_PASSWORD"
+ BUILD_SECRETS="$BUILD_SECRETS --container-registry-url=$INPUT_CONTAINER_REGISTRY_URL"
+ BUILD_SECRETS="$BUILD_SECRETS --container-registry-username=$INPUT_CONTAINER_REGISTRY_USERNAME"
+ BUILD_SECRETS="$BUILD_SECRETS --container-registry-password=$INPUT_CONTAINER_REGISTRY_PASSWORD"
+fi
+
+# Navigate to working directory, if provided
+if [ -n "$INPUT_WORKING_DIRECTORY" ]; then
+ cd $INPUT_WORKING_DIRECTORY
+else
+ cd $GITHUB_WORKSPACE
+fi
+
+if [ -n "$INPUT_PYPI_TOKEN" ]; then
+ BUILD_SECRETS="$BUILD_SECRETS --pypi-token=$INPUT_PYPI_TOKEN"
+fi
+
+if [ -n "$INPUT_PYPI_REPOSITORY" ]; then
+ BUILD_SECRETS="$BUILD_SECRETS --pypi-repository=$INPUT_PYPI_REPOSITORY"
+fi
+
+python -u build.py $INPUT_BUILD_ARGS $BUILD_SECRETS
diff --git a/.github/workflows/build-pipeline.yml b/.github/workflows/build-pipeline.yml
new file mode 100644
index 00000000..b1c2d393
--- /dev/null
+++ b/.github/workflows/build-pipeline.yml
@@ -0,0 +1,38 @@
+name: build-pipeline
+
+on:
+ workflow_dispatch:
+ inputs:
+ build_args:
+ description: "Arguments passed to build script."
+ required: false
+ working_directory:
+ description: "Working directory from where the build command is run."
+ required: false
+
+env:
+ BUILD_ARGS: ${{ secrets.BUILD_ARGS }}
+ WORKING_DIRECTORY: ${{ secrets.WORKING_DIRECTORY }}
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - name: set-input-build-args
+ if: ${{ github.event.inputs != null && github.event.inputs.build_args != null}}
+ run: echo "::set-env name=BUILD_ARGS::${{ github.event.inputs.build_args }}"
+ # new syntax: run: echo "BUILD_ARGS=${{ github.event.inputs.build_args }}" >> "$GITHUB_ENV"
+ - name: set-input-working-directory
+ if: ${{ github.event.inputs != null && github.event.inputs.working_directory != null}}
+ run: echo "::set-env name=WORKING_DIRECTORY::${{ github.event.inputs.working_directory }}"
+ # new syntax: run: echo "WORKING_DIRECTORY=${{ github.event.inputs.working_directory }}" >> "$GITHUB_ENV"
+ - uses: actions/checkout@v2
+ - name: set-host-ip
+ run: echo "::set-env name=_HOST_IP::$(hostname -I | cut -d ' ' -f 1)"
+ - name: run-build-scripts
+ uses: ./.github/actions/build-environment
+ with:
+ build_args: ${{ env.BUILD_ARGS }}
+ working_directory: ${{ env.WORKING_DIRECTORY }}
+
+ # GITHUB_TOKEN: ${{ secret.GITHUB_TOKEN }}
From 80d31a67d398aeea2eab0a8ad9ab4b87ce8c9593 Mon Sep 17 00:00:00 2001
From: Benjamin Raethlein
Date: Tue, 17 Nov 2020 17:02:27 +0100
Subject: [PATCH 024/293] Add build-env GitHub Action
---
.github/actions/build-environment/Dockerfile | 11 +++-
.github/actions/build-environment/action.yml | 2 +-
.github/workflows/check-commit-messages.yml | 20 +++++++
.github/workflows/close-stale-issues.yml | 19 +++++++
.github/workflows/pr-labeler.yml | 15 +++++
.github/workflows/release-pipeline.yml | 60 ++++++++++++++++++++
6 files changed, 125 insertions(+), 2 deletions(-)
create mode 100644 .github/workflows/check-commit-messages.yml
create mode 100644 .github/workflows/close-stale-issues.yml
create mode 100644 .github/workflows/pr-labeler.yml
create mode 100644 .github/workflows/release-pipeline.yml
diff --git a/.github/actions/build-environment/Dockerfile b/.github/actions/build-environment/Dockerfile
index 3f84a45e..07b00711 100644
--- a/.github/actions/build-environment/Dockerfile
+++ b/.github/actions/build-environment/Dockerfile
@@ -19,7 +19,8 @@ ENV LC_ALL="en_US.UTF-8" \
LANG="en_US.UTF-8" \
LANGUAGE="en_US:en" \
TZ="Europe/Berlin" \
- DEBIAN_FRONTEND="noninteractive"
+ DEBIAN_FRONTEND="noninteractive" \
+ HOME="/github/home"
# Install basics
# hadolint ignore=DL3005
@@ -36,6 +37,7 @@ RUN apt-get update --fix-missing \
git \
jq \
software-properties-common \
+ openssh-client \
# Clean up
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
@@ -71,9 +73,16 @@ RUN apt-get update \
# Install Universal-Build
# hadolint ignore=DL3013
RUN pip install --upgrade git+https://github.com/ml-tooling/universal-build.git
+RUN pip install docker
+RUN pip install pytest
COPY entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh
+RUN mkdir -p $HOME/.ssh/ && \
+ # create empty config file if not exists
+ touch $HOME/.ssh/config && \
+ chown -R root:root $HOME/.ssh && \
+ chmod 700 $HOME/.ssh
ENTRYPOINT ["/tini", "-g", "--", "/entrypoint.sh"]
diff --git a/.github/actions/build-environment/action.yml b/.github/actions/build-environment/action.yml
index 4aaa3417..7f0a5fd7 100644
--- a/.github/actions/build-environment/action.yml
+++ b/.github/actions/build-environment/action.yml
@@ -1,4 +1,4 @@
-name: "build-environment"
+name: "build-environment-workspace"
description: "Environment to run build, test, and release steps."
author: "ML Tooling "
inputs:
diff --git a/.github/workflows/check-commit-messages.yml b/.github/workflows/check-commit-messages.yml
new file mode 100644
index 00000000..e42663b4
--- /dev/null
+++ b/.github/workflows/check-commit-messages.yml
@@ -0,0 +1,20 @@
+name: check-commit-message-style
+
+on:
+ pull_request:
+ types:
+ - opened
+ - edited
+ - reopened
+ - synchronize
+ push:
+
+jobs:
+ check-commit-message-style:
+ runs-on: ubuntu-latest
+ steps:
+ - name: check-commit-message-style
+ uses: mristin/opinionated-commit-message@v2.2.0
+ with:
+ allow-one-liners: "true"
+ additional-verbs: "cleanup"
diff --git a/.github/workflows/close-stale-issues.yml b/.github/workflows/close-stale-issues.yml
new file mode 100644
index 00000000..e2354fcf
--- /dev/null
+++ b/.github/workflows/close-stale-issues.yml
@@ -0,0 +1,19 @@
+name: close-stale-issues
+
+on:
+ workflow_dispatch:
+ schedule:
+ - cron: "30 1 * * *"
+
+jobs:
+ close-stale-issues:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/stale@v3
+ with:
+ repo-token: ${{ secrets.GITHUB_TOKEN }}
+ stale-issue-message: "This issue is stale because it has been open 90 days with no activity. Remove stale label or comment or this will be closed in 14 days"
+ stale-pr-message: "This PR is stale because it has been open 90 days with no activity. Remove stale label or comment or this will be closed in 14 days"
+ days-before-stale: 90
+ days-before-close: 14
+ stale-pr-label: stale
diff --git a/.github/workflows/pr-labeler.yml b/.github/workflows/pr-labeler.yml
new file mode 100644
index 00000000..ce1c58b0
--- /dev/null
+++ b/.github/workflows/pr-labeler.yml
@@ -0,0 +1,15 @@
+name: pr-labeler
+
+on:
+ - pull_request_target
+
+jobs:
+ pr-labeler:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/labeler@main
+ with:
+ repo-token: "${{ secrets.GITHUB_TOKEN }}"
+ configuration-path: .github/labeler.yml
+ # workaround for problem: https://github.com/wesnoth/wesnoth/commit/958c82d0867568057caaf58356502ec8c87d8366
+ sync-labels: ""
diff --git a/.github/workflows/release-pipeline.yml b/.github/workflows/release-pipeline.yml
new file mode 100644
index 00000000..44dcd33f
--- /dev/null
+++ b/.github/workflows/release-pipeline.yml
@@ -0,0 +1,60 @@
+name: release-pipeline
+
+on:
+ workflow_dispatch:
+ push:
+ tags:
+ - "v[0-9]+.[0-9]+.[0-9]+"
+
+jobs:
+ release:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+      - name: Fetch unofficial GitHub Action get-latest-version
+ run: wget https://github.com/ml-tooling/universal-build/releases/download/v0.1.0/github-action_get-latest-version.tar.gz -O tmp/github-action_get-latest-version.tar.gz && tar -xzvf tmp/github-action_get-latest-version.tar.gz -C ./.github/actions/
+ - name: get-latest-version
+ id: get-latest-version
+ uses: ./.github/actions/get-latest-version
+ with:
+ current_commit_only: true
+ - name: run-component-builds
+ uses: ./.github/actions/build-environment
+ with:
+ build_args: --make --force --version=${{ steps.get-latest-version.outputs.latest_version }}
+ - name: run-linting-and-style-checks
+ uses: ./.github/actions/build-environment
+ with:
+ build_args: --check --force --version=${{ steps.get-latest-version.outputs.latest_version }}
+ - name: run-component-tests
+ uses: ./.github/actions/build-environment
+ with:
+ build_args: --test --force --version=${{ steps.get-latest-version.outputs.latest_version }}
+ - name: release-components
+ uses: ./.github/actions/build-environment
+ with:
+ build_args: --release --force --version=${{ steps.get-latest-version.outputs.latest_version }}
+ pypi_token: ${{ secrets.PYPI_TOKEN }}
+ pypi_repository: ${{ secrets.PYPI_REPOSITORY }}
+ container_registry_url: ${{ secrets.CONTAINER_REGISTRY_URL }}
+ container_registry_username: ${{ secrets.CONTAINER_REGISTRY_USERNAME }}
+ container_registry_password: ${{ secrets.CONTAINER_REGISTRY_PASSWORD }}
+ create-release-draft:
+ needs: release
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+      - name: Fetch unofficial GitHub Action get-latest-version
+ run: wget https://github.com/ml-tooling/universal-build/releases/download/v0.1.0/github-action_get-latest-version.tar.gz -O tmp/github-action_get-latest-version.tar.gz && tar -xzvf tmp/github-action_get-latest-version.tar.gz -C ./.github/actions/
+ - name: get-latest-version
+ id: get-latest-version
+ uses: ./.github/actions/get-latest-version
+ with:
+ current_commit_only: false
+ - name: create-release-draft
+ uses: release-drafter/release-drafter@v5
+ with:
+ version: ${{ steps.get-latest-version.outputs.latest_version }}
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ # optional: upload release assets via actions/upload-release-asset@v1
From 08a8f62d24018fe50f7b6647b12225fd3db9bce6 Mon Sep 17 00:00:00 2001
From: Benjamin Raethlein
Date: Tue, 17 Nov 2020 17:07:31 +0100
Subject: [PATCH 025/293] Add first tests
---
build.py | 8 +++++
tests/run.py | 77 ++++++++++++++++++++++++++++++++++++++++
tests/test.py | 63 ++++++++++++++++++++++++++++++++
tests/workspace_tests.py | 12 +++++++
4 files changed, 160 insertions(+)
create mode 100644 tests/run.py
create mode 100644 tests/test.py
create mode 100644 tests/workspace_tests.py
diff --git a/build.py b/build.py
index 4e4de8d7..14edcde0 100644
--- a/build.py
+++ b/build.py
@@ -1,6 +1,8 @@
+import os
import subprocess
import argparse
import datetime
+import pytest
from universal_build import build_utils
@@ -99,6 +101,12 @@
if completed_process.returncode > 0:
build_utils.exit_process(1)
+if args[build_utils.FLAG_TEST]:
+ # test_exit_code = int(
+ # pytest.main(["-x", os.path.join("tests")])
+ # )
+ completed_process = build_utils.run("python ./tests/run.py", exit_on_error=True)
+
if args[build_utils.FLAG_RELEASE]:
build_utils.release_docker_image(
COMPONENT_NAME,
diff --git a/tests/run.py b/tests/run.py
new file mode 100644
index 00000000..f0fe1b27
--- /dev/null
+++ b/tests/run.py
@@ -0,0 +1,77 @@
+import docker
+import time
+import requests
+import sys
+import os
+import subprocess
+
+# from config import workspace_name, workspace_port, network_name
+
+client = docker.from_env()
+# try:
+# client.networks.get(network_name)
+# except docker.errors.NotFound:
+# client.networks.create(network_name, driver='bridge')
+workspace_name = "test-ml-workspace"
+container = client.containers.run(
+ 'mltooling/ml-workspace-minimal:0.9.1',
+ # network=network_name,
+ name=workspace_name,
+ environment={
+ "WORKSPACE_NAME": workspace_name
+ },
+ detach=True)
+
+container.reload()
+ip_address = container.attrs['NetworkSettings']['Networks']['bridge']['IPAddress']
+os.environ["WORKSPACE_IP"] = ip_address
+workspace_port = 8080
+
+index = 0
+health_url = f'http://{ip_address}:{workspace_port}/healthy'
+r = None
+while r == None or (r.status_code != 200 and index < 15):
+ index+=1
+ time.sleep(1)
+ try:
+ r = requests.get(health_url, allow_redirects=False, timeout=2)
+ except requests.ConnectionError as e:
+ # Catch error that is raised when the workspace container is not reachable yet
+ pass
+
+ if index == 15:
+ print("The workspace did not start")
+ sys.exit(-1)
+
+exit_code = 0
+# Sleep a moment to give all processes time to start within the Workspace container
+time.sleep(15)
+print("Workspace started! Execute tests:", flush=True)
+
+# Test workspace APIs and SSH
+print("Execute API and SSH Tests", flush=True)
+exit_code_api_test = subprocess.call(["python", "./tests/test.py"])
+
+# Test libraries within workspace
+print("Execute library tests within workspace", flush=True)
+## Copy and executing unit test file in workspace
+subprocess.call(["tar", "-cvf", "./tests/workspace_tests.py.tar", "-C", "./tests", "workspace_tests.py"], stdout=subprocess.PIPE)
+with open('./tests/workspace_tests.py.tar', 'r') as file:
+ container.put_archive(path="/tmp", data=file.read())
+exit_code_lib_test, output = container.exec_run("python /tmp/workspace_tests.py")
+print(output.decode("UTF-8"), flush=True)
+
+print("Executed tests.", flush=True)
+
+# Cleanup
+print("Clean up landscape", flush=True)
+container.remove(force=True)
+
+if (exit_code_api_test and exit_code_lib_test) != 0:
+ exit_code = 1
+elif exit_code_api_test != 0:
+ exit_code = 2
+elif exit_code_lib_test != 0:
+ exit_code = 3
+
+sys.exit(exit_code)
diff --git a/tests/test.py b/tests/test.py
new file mode 100644
index 00000000..8f41a143
--- /dev/null
+++ b/tests/test.py
@@ -0,0 +1,63 @@
+import unittest
+import requests
+import os
+
+from subprocess import run, PIPE
+
+import re
+
+# from config import workspace_name, workspace_port
+workspace_name = os.getenv("WORKSPACE_IP", "localhost")
+workspace_port = 8080
+
+class TestStringMethods(unittest.TestCase):
+
+ def test_healthy(self):
+ result = requests.get(f'http://{workspace_name}:{workspace_port}/healthy')
+ print(result.status_code)
+ self.assertEqual(result.status_code, 200)
+
+ def test_tool_vnc(self):
+ # Test whether tools are accessible
+ result = requests.get(f'http://{workspace_name}:{workspace_port}/tools/vnc/?password=vncpassword')
+ self.assertEqual(result.status_code, 200)
+ self.assertIn('Desktop VNC', result.text)
+
+ def test_tool_vscode(self):
+ result = requests.get(f'http://{workspace_name}:{workspace_port}/tools/vscode/')
+ self.assertEqual(result.status_code, 200)
+ self.assertIn('Microsoft Corporation', result.text)
+
+ def test_ssh(self):
+ result = requests.get(f'http://{workspace_name}:{workspace_port}/tooling/ssh/setup-command?origin=http://{workspace_name}:{workspace_port}')
+ self.assertEqual(result.status_code, 200)
+ self.assertIn('/bin/bash', result.text)
+ ssh_script_runner_regex = rf'^\/bin\/bash <\(curl -s --insecure "(http:\/\/{workspace_name}:{workspace_port}\/shared\/ssh\/setup\?token=[a-z0-9]+&host={workspace_name}&port={workspace_port})"\)$'
+ pattern = re.compile(ssh_script_runner_regex)
+ match = pattern.match(result.text)
+ self.assertIsNotNone(match)
+
+ # Execute the ssh setup script and automatically pass an ssh connection name to the script
+ script_url = match.groups()[0]
+ r = requests.get(script_url)
+ with open('/setup-ssh.sh', 'w') as f:
+ f.write(r.text)
+ # make the file executable for the user
+ os.chmod('/setup-ssh.sh', 0o744)
+ ssh_connection_name = 'test'
+ completed_process = run(['/bin/bash -c "/setup-ssh.sh"'], input=ssh_connection_name, encoding='ascii', shell=True, stdout=PIPE, stderr=PIPE)
+ self.assertEqual(completed_process.stderr, '')
+ # import time
+ # time.sleep(1200)
+ # problem is that ssh does not look for the ssh config in ~/.ssh/ but only in /etc/ssh/
+ # ssh -o UserKnownHostsFile=~/.ssh/known_hosts -i ~/.ssh/test -F ~/.ssh/config test
+ self.assertIn('Connection successful!', completed_process.stdout)
+
+ completed_process = run("ssh test 'echo $WORKSPACE_NAME'", shell=True, stdout=PIPE, stderr=PIPE)
+ self.assertEqual(completed_process.stderr, b'')
+ stdout = completed_process.stdout.decode('UTF-8').replace('\n', '')
+ self.assertEqual(stdout, workspace_name)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/tests/workspace_tests.py b/tests/workspace_tests.py
new file mode 100644
index 00000000..ec4ad186
--- /dev/null
+++ b/tests/workspace_tests.py
@@ -0,0 +1,12 @@
+import unittest
+
+import numpy as np
+
+class TestMethods(unittest.TestCase):
+ def test_pytorch(self):
+ size = 10
+ x = np.random.randint(2, size=size)
+ self.assertEqual(len(x), size)
+
+if __name__ == '__main__':
+ unittest.main()
From 00958150c415bc393b123a3a448223554f21e583 Mon Sep 17 00:00:00 2001
From: Benjamin Raethlein
Date: Tue, 17 Nov 2020 22:21:29 +0100
Subject: [PATCH 026/293] Fix tests so that they run via act
---
.github/actions/build-environment/Dockerfile | 14 ++++++++------
tests/run.py | 1 +
tests/test.py | 17 +++++++----------
3 files changed, 16 insertions(+), 16 deletions(-)
diff --git a/.github/actions/build-environment/Dockerfile b/.github/actions/build-environment/Dockerfile
index 07b00711..5525e41d 100644
--- a/.github/actions/build-environment/Dockerfile
+++ b/.github/actions/build-environment/Dockerfile
@@ -19,8 +19,7 @@ ENV LC_ALL="en_US.UTF-8" \
LANG="en_US.UTF-8" \
LANGUAGE="en_US:en" \
TZ="Europe/Berlin" \
- DEBIAN_FRONTEND="noninteractive" \
- HOME="/github/home"
+ DEBIAN_FRONTEND="noninteractive"
# Install basics
# hadolint ignore=DL3005
@@ -79,10 +78,13 @@ RUN pip install pytest
COPY entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh
-RUN mkdir -p $HOME/.ssh/ && \
+RUN mkdir -p /github/home/.ssh/ && \
# create empty config file if not exists
- touch $HOME/.ssh/config && \
- chown -R root:root $HOME/.ssh && \
- chmod 700 $HOME/.ssh
+ touch /github/home/.ssh/config && \
+ chown -R root:root /github/home/.ssh && \
+ chmod 700 /github/home/.ssh && \
+ # create a symlink because ssh looks in the /root/.ssh folder (home of root user) and not in the
+ # actual $HOME directory
+ ln -s /github/home/.ssh /root/.ssh
ENTRYPOINT ["/tini", "-g", "--", "/entrypoint.sh"]
diff --git a/tests/run.py b/tests/run.py
index f0fe1b27..d316507a 100644
--- a/tests/run.py
+++ b/tests/run.py
@@ -24,6 +24,7 @@
container.reload()
ip_address = container.attrs['NetworkSettings']['Networks']['bridge']['IPAddress']
+os.environ["WORKSPACE_NAME"] = workspace_name
os.environ["WORKSPACE_IP"] = ip_address
workspace_port = 8080
diff --git a/tests/test.py b/tests/test.py
index 8f41a143..3e5550df 100644
--- a/tests/test.py
+++ b/tests/test.py
@@ -7,32 +7,33 @@
import re
# from config import workspace_name, workspace_port
-workspace_name = os.getenv("WORKSPACE_IP", "localhost")
+workspace_host = os.getenv("WORKSPACE_IP", "localhost")
+workspace_name = os.getenv("WORKSPACE_NAME", "")
workspace_port = 8080
class TestStringMethods(unittest.TestCase):
def test_healthy(self):
- result = requests.get(f'http://{workspace_name}:{workspace_port}/healthy')
+ result = requests.get(f'http://{workspace_host}:{workspace_port}/healthy')
print(result.status_code)
self.assertEqual(result.status_code, 200)
def test_tool_vnc(self):
# Test whether tools are accessible
- result = requests.get(f'http://{workspace_name}:{workspace_port}/tools/vnc/?password=vncpassword')
+ result = requests.get(f'http://{workspace_host}:{workspace_port}/tools/vnc/?password=vncpassword')
self.assertEqual(result.status_code, 200)
self.assertIn('Desktop VNC', result.text)
def test_tool_vscode(self):
- result = requests.get(f'http://{workspace_name}:{workspace_port}/tools/vscode/')
+ result = requests.get(f'http://{workspace_host}:{workspace_port}/tools/vscode/')
self.assertEqual(result.status_code, 200)
self.assertIn('Microsoft Corporation', result.text)
def test_ssh(self):
- result = requests.get(f'http://{workspace_name}:{workspace_port}/tooling/ssh/setup-command?origin=http://{workspace_name}:{workspace_port}')
+ result = requests.get(f'http://{workspace_host}:{workspace_port}/tooling/ssh/setup-command?origin=http://{workspace_host}:{workspace_port}')
self.assertEqual(result.status_code, 200)
self.assertIn('/bin/bash', result.text)
- ssh_script_runner_regex = rf'^\/bin\/bash <\(curl -s --insecure "(http:\/\/{workspace_name}:{workspace_port}\/shared\/ssh\/setup\?token=[a-z0-9]+&host={workspace_name}&port={workspace_port})"\)$'
+ ssh_script_runner_regex = rf'^\/bin\/bash <\(curl -s --insecure "(http:\/\/{workspace_host}:{workspace_port}\/shared\/ssh\/setup\?token=[a-z0-9]+&host={workspace_host}&port={workspace_port})"\)$'
pattern = re.compile(ssh_script_runner_regex)
match = pattern.match(result.text)
self.assertIsNotNone(match)
@@ -47,10 +48,6 @@ def test_ssh(self):
ssh_connection_name = 'test'
completed_process = run(['/bin/bash -c "/setup-ssh.sh"'], input=ssh_connection_name, encoding='ascii', shell=True, stdout=PIPE, stderr=PIPE)
self.assertEqual(completed_process.stderr, '')
- # import time
- # time.sleep(1200)
- # problem is that ssh does not look for the ssh config in ~/.ssh/ but only in /etc/ssh/
- # ssh -o UserKnownHostsFile=~/.ssh/known_hosts -i ~/.ssh/test -F ~/.ssh/config test
self.assertIn('Connection successful!', completed_process.stdout)
completed_process = run("ssh test 'echo $WORKSPACE_NAME'", shell=True, stdout=PIPE, stderr=PIPE)
From 34141fe04d125b1daf91a9a11e8b5c742d5f575b Mon Sep 17 00:00:00 2001
From: Benjamin Raethlein
Date: Tue, 17 Nov 2020 22:21:44 +0100
Subject: [PATCH 027/293] Format Dockerfile
---
.github/actions/build-environment/Dockerfile | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/.github/actions/build-environment/Dockerfile b/.github/actions/build-environment/Dockerfile
index 5525e41d..49633d91 100644
--- a/.github/actions/build-environment/Dockerfile
+++ b/.github/actions/build-environment/Dockerfile
@@ -78,13 +78,13 @@ RUN pip install pytest
COPY entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh
-RUN mkdir -p /github/home/.ssh/ && \
+RUN mkdir -p /github/home/.ssh/ \
# create empty config file if not exists
- touch /github/home/.ssh/config && \
- chown -R root:root /github/home/.ssh && \
- chmod 700 /github/home/.ssh && \
+ && touch /github/home/.ssh/config \
+ && chown -R root:root /github/home/.ssh \
+ && chmod 700 /github/home/.ssh \
# create a symlink because ssh looks in the /root/.ssh folder (home of root user) and not in the
# actual $HOME directory
- ln -s /github/home/.ssh /root/.ssh
+ && ln -s /github/home/.ssh /root/.ssh
ENTRYPOINT ["/tini", "-g", "--", "/entrypoint.sh"]
From 5cedbce2e214eff1875e0c2123974db226da25eb Mon Sep 17 00:00:00 2001
From: Benjamin Raethlein
Date: Tue, 17 Nov 2020 22:21:59 +0100
Subject: [PATCH 028/293] Set temp flag so workflow works in GitHub Actions
---
.github/workflows/build-pipeline.yml | 1 +
1 file changed, 1 insertion(+)
diff --git a/.github/workflows/build-pipeline.yml b/.github/workflows/build-pipeline.yml
index b1c2d393..e0172a7e 100644
--- a/.github/workflows/build-pipeline.yml
+++ b/.github/workflows/build-pipeline.yml
@@ -13,6 +13,7 @@ on:
env:
BUILD_ARGS: ${{ secrets.BUILD_ARGS }}
WORKING_DIRECTORY: ${{ secrets.WORKING_DIRECTORY }}
+ ACTIONS_ALLOW_UNSECURE_COMMANDS: true # TODO: only needed until act supports the new way
jobs:
build:
From e953c0b79c10440c906aa8b787b1a9233d708683 Mon Sep 17 00:00:00 2001
From: Jan Kalkan
Date: Wed, 18 Nov 2020 19:58:01 +0100
Subject: [PATCH 029/293] Build: Switch to pytest
---
build.py | 4 +--
tests/run.py | 46 +++++++++++++++++-----------
tests/test.py | 60 ------------------------------------
tests/test_workspace.py | 67 +++++++++++++++++++++++++++++++++++++++++
4 files changed, 96 insertions(+), 81 deletions(-)
delete mode 100644 tests/test.py
create mode 100644 tests/test_workspace.py
diff --git a/build.py b/build.py
index 14edcde0..c39dabe5 100644
--- a/build.py
+++ b/build.py
@@ -1,8 +1,6 @@
-import os
-import subprocess
import argparse
import datetime
-import pytest
+import subprocess
from universal_build import build_utils
diff --git a/tests/run.py b/tests/run.py
index d316507a..7575ca78 100644
--- a/tests/run.py
+++ b/tests/run.py
@@ -1,9 +1,10 @@
-import docker
-import time
-import requests
-import sys
import os
import subprocess
+import sys
+import time
+
+import docker
+import requests
# from config import workspace_name, workspace_port, network_name
@@ -14,29 +15,28 @@
# client.networks.create(network_name, driver='bridge')
workspace_name = "test-ml-workspace"
container = client.containers.run(
- 'mltooling/ml-workspace-minimal:0.9.1',
+ "mltooling/ml-workspace-minimal:0.9.1",
# network=network_name,
name=workspace_name,
- environment={
- "WORKSPACE_NAME": workspace_name
- },
- detach=True)
+ environment={"WORKSPACE_NAME": workspace_name},
+ detach=True,
+)
container.reload()
-ip_address = container.attrs['NetworkSettings']['Networks']['bridge']['IPAddress']
+ip_address = container.attrs["NetworkSettings"]["Networks"]["bridge"]["IPAddress"]
os.environ["WORKSPACE_NAME"] = workspace_name
os.environ["WORKSPACE_IP"] = ip_address
workspace_port = 8080
index = 0
-health_url = f'http://{ip_address}:{workspace_port}/healthy'
+health_url = f"http://{ip_address}:{workspace_port}/healthy"
r = None
-while r == None or (r.status_code != 200 and index < 15):
- index+=1
+while r is None or (r.status_code != 200 and index < 15):
+ index += 1
time.sleep(1)
try:
r = requests.get(health_url, allow_redirects=False, timeout=2)
- except requests.ConnectionError as e:
+ except requests.ConnectionError:
# Catch error that is raised when the workspace container is not reachable yet
pass
@@ -51,13 +51,23 @@
# Test workspace APIs and SSH
print("Execute API and SSH Tests", flush=True)
-exit_code_api_test = subprocess.call(["python", "./tests/test.py"])
+exit_code_api_test = subprocess.call(["pytest", "-s", "tests"])
# Test libraries within workspace
print("Execute library tests within workspace", flush=True)
-## Copy and executing unit test file in workspace
-subprocess.call(["tar", "-cvf", "./tests/workspace_tests.py.tar", "-C", "./tests", "workspace_tests.py"], stdout=subprocess.PIPE)
-with open('./tests/workspace_tests.py.tar', 'r') as file:
+# Copy and executing unit test file in workspace
+subprocess.call(
+ [
+ "tar",
+ "-cvf",
+ "./tests/workspace_tests.py.tar",
+ "-C",
+ "./tests",
+ "workspace_tests.py",
+ ],
+ stdout=subprocess.PIPE,
+)
+with open("./tests/workspace_tests.py.tar", "r") as file:
container.put_archive(path="/tmp", data=file.read())
exit_code_lib_test, output = container.exec_run("python /tmp/workspace_tests.py")
print(output.decode("UTF-8"), flush=True)
diff --git a/tests/test.py b/tests/test.py
deleted file mode 100644
index 3e5550df..00000000
--- a/tests/test.py
+++ /dev/null
@@ -1,60 +0,0 @@
-import unittest
-import requests
-import os
-
-from subprocess import run, PIPE
-
-import re
-
-# from config import workspace_name, workspace_port
-workspace_host = os.getenv("WORKSPACE_IP", "localhost")
-workspace_name = os.getenv("WORKSPACE_NAME", "")
-workspace_port = 8080
-
-class TestStringMethods(unittest.TestCase):
-
- def test_healthy(self):
- result = requests.get(f'http://{workspace_host}:{workspace_port}/healthy')
- print(result.status_code)
- self.assertEqual(result.status_code, 200)
-
- def test_tool_vnc(self):
- # Test whether tools are accessible
- result = requests.get(f'http://{workspace_host}:{workspace_port}/tools/vnc/?password=vncpassword')
- self.assertEqual(result.status_code, 200)
- self.assertIn('Desktop VNC', result.text)
-
- def test_tool_vscode(self):
- result = requests.get(f'http://{workspace_host}:{workspace_port}/tools/vscode/')
- self.assertEqual(result.status_code, 200)
- self.assertIn('Microsoft Corporation', result.text)
-
- def test_ssh(self):
- result = requests.get(f'http://{workspace_host}:{workspace_port}/tooling/ssh/setup-command?origin=http://{workspace_host}:{workspace_port}')
- self.assertEqual(result.status_code, 200)
- self.assertIn('/bin/bash', result.text)
- ssh_script_runner_regex = rf'^\/bin\/bash <\(curl -s --insecure "(http:\/\/{workspace_host}:{workspace_port}\/shared\/ssh\/setup\?token=[a-z0-9]+&host={workspace_host}&port={workspace_port})"\)$'
- pattern = re.compile(ssh_script_runner_regex)
- match = pattern.match(result.text)
- self.assertIsNotNone(match)
-
- # Execute the ssh setup script and automatically pass an ssh connection name to the script
- script_url = match.groups()[0]
- r = requests.get(script_url)
- with open('/setup-ssh.sh', 'w') as f:
- f.write(r.text)
- # make the file executable for the user
- os.chmod('/setup-ssh.sh', 0o744)
- ssh_connection_name = 'test'
- completed_process = run(['/bin/bash -c "/setup-ssh.sh"'], input=ssh_connection_name, encoding='ascii', shell=True, stdout=PIPE, stderr=PIPE)
- self.assertEqual(completed_process.stderr, '')
- self.assertIn('Connection successful!', completed_process.stdout)
-
- completed_process = run("ssh test 'echo $WORKSPACE_NAME'", shell=True, stdout=PIPE, stderr=PIPE)
- self.assertEqual(completed_process.stderr, b'')
- stdout = completed_process.stdout.decode('UTF-8').replace('\n', '')
- self.assertEqual(stdout, workspace_name)
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/tests/test_workspace.py b/tests/test_workspace.py
new file mode 100644
index 00000000..865a85da
--- /dev/null
+++ b/tests/test_workspace.py
@@ -0,0 +1,67 @@
+import os
+import re
+from subprocess import PIPE, run
+
+import requests
+
+# from config import workspace_name, workspace_port
+workspace_host = os.getenv("WORKSPACE_IP", "localhost")
+workspace_name = os.getenv("WORKSPACE_NAME", "")
+workspace_port = 8080
+
+
+class TestStringMethods:
+ def test_healthy(self):
+ result = requests.get(f"http://{workspace_host}:{workspace_port}/healthy")
+ print(result.status_code)
+ assert result.status_code == 200
+
+ def test_tool_vnc(self):
+ # Test whether tools are accessible
+ result = requests.get(
+ f"http://{workspace_host}:{workspace_port}/tools/vnc/?password=vncpassword"
+ )
+ assert result.status_code == 200
+ assert "Desktop VNC" in result.text
+
+ def test_tool_vscode(self):
+ result = requests.get(f"http://{workspace_host}:{workspace_port}/tools/vscode/")
+ assert result.status_code == 200
+ assert "Microsoft Corporation" in result.text
+
+ def test_ssh(self):
+ result = requests.get(
+ f"http://{workspace_host}:{workspace_port}/tooling/ssh/setup-command?origin=http://{workspace_host}:{workspace_port}"
+ )
+ assert result.status_code == 200
+ assert "/bin/bash" in result.text
+ ssh_script_runner_regex = rf'^\/bin\/bash <\(curl -s --insecure "(http:\/\/{workspace_host}:{workspace_port}\/shared\/ssh\/setup\?token=[a-z0-9]+&host={workspace_host}&port={workspace_port})"\)$'
+ pattern = re.compile(ssh_script_runner_regex)
+ match = pattern.match(result.text)
+ assert match is not None
+
+ # Execute the ssh setup script and automatically pass an ssh connection name to the script
+ script_url = match.groups()[0]
+ r = requests.get(script_url)
+ with open("/setup-ssh.sh", "w") as f:
+ f.write(r.text)
+ # make the file executable for the user
+ os.chmod("/setup-ssh.sh", 0o744)
+ ssh_connection_name = "test"
+ completed_process = run(
+ ['/bin/bash -c "/setup-ssh.sh"'],
+ input=ssh_connection_name,
+ encoding="ascii",
+ shell=True,
+ stdout=PIPE,
+ stderr=PIPE,
+ )
+ assert completed_process.stderr == ""
+ assert "Connection successful!" in completed_process.stdout
+
+ completed_process = run(
+ "ssh test 'echo $WORKSPACE_NAME'", shell=True, stdout=PIPE, stderr=PIPE
+ )
+ assert completed_process.stderr == b""
+ stdout = completed_process.stdout.decode("UTF-8").replace("\n", "")
+ assert stdout == workspace_name
From eb5cc0b9b84702cbeb62171292d865f4218b3667 Mon Sep 17 00:00:00 2001
From: Benjamin Raethlein
Date: Thu, 19 Nov 2020 09:11:15 +0100
Subject: [PATCH 030/293] Add build workflow
---
.github/workflows/build-pipeline.yml | 39 ++++++++++++++++++++++++++++
1 file changed, 39 insertions(+)
create mode 100644 .github/workflows/build-pipeline.yml
diff --git a/.github/workflows/build-pipeline.yml b/.github/workflows/build-pipeline.yml
new file mode 100644
index 00000000..e0172a7e
--- /dev/null
+++ b/.github/workflows/build-pipeline.yml
@@ -0,0 +1,39 @@
+name: build-pipeline
+
+on:
+ workflow_dispatch:
+ inputs:
+ build_args:
+ description: "Arguments passed to build script."
+ required: false
+ working_directory:
+ description: "Working directory from where the build command is run."
+ required: false
+
+env:
+ BUILD_ARGS: ${{ secrets.BUILD_ARGS }}
+ WORKING_DIRECTORY: ${{ secrets.WORKING_DIRECTORY }}
+ ACTIONS_ALLOW_UNSECURE_COMMANDS: true # TODO: only needed until act supports the new way
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - name: set-input-build-args
+ if: ${{ github.event.inputs != null && github.event.inputs.build_args != null}}
+ run: echo "::set-env name=BUILD_ARGS::${{ github.event.inputs.build_args }}"
+ # new syntax: run: echo "BUILD_ARGS=${{ github.event.inputs.build_args }}" >> "$GITHUB_ENV"
+ - name: set-input-working-directory
+ if: ${{ github.event.inputs != null && github.event.inputs.working_directory != null}}
+ run: echo "::set-env name=WORKING_DIRECTORY::${{ github.event.inputs.working_directory }}"
+ # new syntax: run: echo "WORKING_DIRECTORY=${{ github.event.inputs.working_directory }}" >> "$GITHUB_ENV"
+ - uses: actions/checkout@v2
+ - name: set-host-ip
+ run: echo "::set-env name=_HOST_IP::$(hostname -I | cut -d ' ' -f 1)"
+ - name: run-build-scripts
+ uses: ./.github/actions/build-environment
+ with:
+ build_args: ${{ env.BUILD_ARGS }}
+ working_directory: ${{ env.WORKING_DIRECTORY }}
+
+      # GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
From b9fe88e4830d88a2824fc6d626dd984a29cd8197 Mon Sep 17 00:00:00 2001
From: Benjamin Raethlein
Date: Thu, 19 Nov 2020 10:16:53 +0100
Subject: [PATCH 031/293] Use correct name for image (containing flavor)
---
build.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/build.py b/build.py
index c39dabe5..cc0d1aad 100644
--- a/build.py
+++ b/build.py
@@ -94,7 +94,7 @@
)
completed_process = build_utils.build_docker_image(
- COMPONENT_NAME, version=args[build_utils.FLAG_VERSION], build_args=build_args
+ service_name, version=args[build_utils.FLAG_VERSION], build_args=build_args
)
if completed_process.returncode > 0:
build_utils.exit_process(1)
@@ -107,7 +107,7 @@
if args[build_utils.FLAG_RELEASE]:
build_utils.release_docker_image(
- COMPONENT_NAME,
+ service_name,
args[build_utils.FLAG_VERSION],
args[build_utils.FLAG_DOCKER_IMAGE_PREFIX],
)
From f3077b59e9c81f348db743b7aca6751ed7a4f53d Mon Sep 17 00:00:00 2001
From: Jan Kalkan
Date: Thu, 19 Nov 2020 12:06:58 +0100
Subject: [PATCH 032/293] Refactor incubation zone
---
Dockerfile | 50 ++++++++++++++++++++++++++------------------------
1 file changed, 26 insertions(+), 24 deletions(-)
diff --git a/Dockerfile b/Dockerfile
index 24ef548c..76f571e8 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -63,7 +63,7 @@ RUN \
apt-get upgrade -y && \
apt-get update && \
apt-get install -y --no-install-recommends \
- # This is necessary for apt to access HTTPS sources:
+ # This is necessary for apt to access HTTPS sources:
apt-transport-https \
gnupg-agent \
gpg-agent \
@@ -286,7 +286,7 @@ RUN wget --no-verbose https://repo.anaconda.com/miniconda/Miniconda3-py37_${COND
ENV PATH=$CONDA_DIR/bin:$PATH
# There is nothing added yet to LD_LIBRARY_PATH, so we can overwrite
-ENV LD_LIBRARY_PATH=$CONDA_DIR/lib
+ENV LD_LIBRARY_PATH=$CONDA_DIR/lib
# Install node.js
RUN \
@@ -294,7 +294,7 @@ RUN \
# https://nodejs.org/en/about/releases/ use even numbered releases
curl -sL https://deb.nodesource.com/setup_12.x | sudo -E bash - && \
apt-get install -y nodejs && \
- # As conda is first in path, the commands 'node' and 'npm' reference to the version of conda.
+ # As conda is first in path, the commands 'node' and 'npm' reference to the version of conda.
# Replace those versions with the newly installed versions of node
rm -f /opt/conda/bin/node && ln -s /usr/bin/node /opt/conda/bin/node && \
rm -f /opt/conda/bin/npm && ln -s /usr/bin/npm /opt/conda/bin/npm && \
@@ -310,7 +310,7 @@ RUN \
echo "deb https://dl.yarnpkg.com/debian/ stable main" | sudo tee /etc/apt/sources.list.d/yarn.list && \
apt-get update && \
apt-get install -y --no-install-recommends yarn && \
- # Install typescript
+ # Install typescript
/usr/bin/npm install -g typescript && \
# Install webpack - 32 MB
/usr/bin/npm install -g webpack && \
@@ -328,7 +328,7 @@ RUN \
# Cleanup
clean-layer.sh
-ENV JAVA_HOME="/usr/lib/jvm/java-11-openjdk-amd64"
+ENV JAVA_HOME="/usr/lib/jvm/java-11-openjdk-amd64"
# TODO add MAVEN_HOME?
### END RUNTIMES ###
@@ -341,6 +341,7 @@ RUN \
# Install custom font
apt-get install -y xfce4 xfce4-terminal xterm && \
apt-get purge -y pm-utils xscreensaver* && \
+ apt-get install -y xfce4-clipman && \
# Cleanup
clean-layer.sh
@@ -409,7 +410,7 @@ RUN \
apt-get install -y --no-install-recommends gdebi && \
# Search for files
apt-get install -y --no-install-recommends catfish && \
- # TODO: Unable to locate package: apt-get install -y --no-install-recommends gnome-search-tool &&
+ # TODO: Unable to locate package: apt-get install -y --no-install-recommends gnome-search-tool &&
apt-get install -y --no-install-recommends font-manager && \
# vs support for thunar
apt-get install -y thunar-vcs-plugin && \
@@ -444,13 +445,13 @@ RUN \
ln -s /usr/bin/chromium-browser /usr/bin/google-chrome && \
# Cleanup
# Large package: gnome-user-guide 50MB app-install-data 50MB
- apt-get remove -y app-install-data gnome-user-guide && \
+ apt-get remove -y app-install-data gnome-user-guide && \
clean-layer.sh
# Add the defaults from /lib/x86_64-linux-gnu, otherwise lots of no version errors
# cannot be added above otherwise there are errors in the installation of the gui tools
# Call order: https://unix.stackexchange.com/questions/367600/what-is-the-order-that-linuxs-dynamic-linker-searches-paths-in
-ENV LD_LIBRARY_PATH=/lib/x86_64-linux-gnu:/usr/lib/x86_64-linux-gnu:$CONDA_DIR/lib
+ENV LD_LIBRARY_PATH=/lib/x86_64-linux-gnu:/usr/lib/x86_64-linux-gnu:$CONDA_DIR/lib
# Install Web Tools - Offered via Jupyter Tooling Plugin
@@ -520,8 +521,8 @@ RUN \
COPY resources/libraries ${RESOURCES_PATH}/libraries
### Install main data science libs
-RUN \
- # Link Conda - All python are linke to the conda instances
+RUN \
+ # Link Conda - All python are linke to the conda instances
# Linking python 3 crashes conda -> cannot install anyting - remove instead
#ln -s -f $CONDA_DIR/bin/python /usr/bin/python3 && \
# if removed -> cannot use add-apt-repository
@@ -531,7 +532,7 @@ RUN \
apt-get update && \
# upgrade pip
pip install --upgrade pip && \
- # If minimal flavor - install
+ # If minimal flavor - install
if [ "$WORKSPACE_FLAVOR" = "minimal" ]; then \
# Install nomkl - mkl needs lots of space
conda install -y --update-all nomkl ; \
@@ -620,7 +621,7 @@ RUN \
apt-get install -y libtesseract-dev && \
# Install libjpeg turbo for speedup in image processing
conda install -y libjpeg-turbo && \
- # Faiss - A library for efficient similarity search and clustering of dense vectors.
+ # Faiss - A library for efficient similarity search and clustering of dense vectors.
conda install -y -c pytorch faiss-cpu && \
# Install full pip requirements
pip install --no-cache-dir --upgrade -r ${RESOURCES_PATH}/libraries/requirements-full.txt && \
@@ -634,7 +635,7 @@ RUN \
# Fix conda version
RUN \
- # Conda installs wrong node version - relink conda node to the actual node
+ # Conda installs wrong node version - relink conda node to the actual node
rm -f /opt/conda/bin/node && ln -s /usr/bin/node /opt/conda/bin/node && \
rm -f /opt/conda/bin/npm && ln -s /usr/bin/npm /opt/conda/bin/npm
@@ -693,7 +694,7 @@ RUN \
jupyter nbextension enable --py --sys-prefix qgrid && \
# Activate Colab support
jupyter serverextension enable --py jupyter_http_over_ws && \
- # Activate Voila Rendering
+ # Activate Voila Rendering
# currently not working jupyter serverextension enable voila --sys-prefix && \
# Enable ipclusters
ipcluster nbextension enable && \
@@ -725,7 +726,7 @@ RUN \
$lab_ext_install jupyterlab_tensorboard && \
# install jupyterlab git
$lab_ext_install @jupyterlab/git && \
- pip install jupyterlab-git && \
+ pip install jupyterlab-git && \
jupyter serverextension enable --py jupyterlab_git && \
# For Matplotlib: https://github.com/matplotlib/jupyter-matplotlib
$lab_ext_install jupyter-matplotlib && \
@@ -861,13 +862,14 @@ RUN \
### END VSCODE ###
-### INCUBATION ZONE ###
+### INCUBATION ZONE ###
RUN \
apt-get update && \
# Newer jedi makes trouble with jupyterlab-lsp
+ # ! Not moved to prevent autoupdate
pip install --no-cache-dir jedi==0.15.2 && \
- apt-get install -y xfce4-clipman && \
+ # apt-get install -y xfce4-clipman && \
# required by rodeo ide (8MB)
# apt-get install -y libgconf2-4 && \
# required for pvporcupine (800kb)
@@ -876,7 +878,7 @@ RUN \
# apt-get install -y libasound2-dev libjack-dev && \
# libproj-dev required for cartopy (15MB)
# apt-get install -y libproj-dev && \
- # mysql server: 150MB
+ # mysql server: 150MB
# apt-get install -y mysql-server && \
# If minimal or light flavor -> exit here
if [ "$WORKSPACE_FLAVOR" = "minimal" ] || [ "$WORKSPACE_FLAVOR" = "light" ]; then \
@@ -1021,7 +1023,7 @@ RUN \
# MKL and Hardware Optimization
# Fix problem with MKL with duplicated libiomp5: https://github.com/dmlc/xgboost/issues/1715
-# Alternative - use openblas instead of Intel MKL: conda install -y nomkl
+# Alternative - use openblas instead of Intel MKL: conda install -y nomkl
# http://markus-beuckelmann.de/blog/boosting-numpy-blas.html
# MKL:
# https://software.intel.com/en-us/articles/tips-to-improve-performance-for-popular-deep-learning-frameworks-on-multi-core-cpus
@@ -1054,7 +1056,7 @@ ENV CONFIG_BACKUP_ENABLED="true" \
WORKSPACE_PORT="8080" \
# Set zsh as default shell (e.g. in jupyter)
SHELL="/usr/bin/zsh" \
- # Fix dark blue color for ls command (unreadable):
+ # Fix dark blue color for ls command (unreadable):
# https://askubuntu.com/questions/466198/how-do-i-change-the-color-for-directories-with-ls-in-the-console
# USE default LS_COLORS - Dont set LS COLORS - overwritten in zshrc
# LS_COLORS="" \
@@ -1062,7 +1064,7 @@ ENV CONFIG_BACKUP_ENABLED="true" \
# this can be problematic since docker restricts CPUs by stil showing all
MAX_NUM_THREADS="auto"
-### END CONFIGURATION ###
+### END CONFIGURATION ###
ARG ARG_BUILD_DATE="unknown"
ARG ARG_VCS_REF="unknown"
ARG ARG_WORKSPACE_VERSION="unknown"
@@ -1092,7 +1094,7 @@ LABEL \
"org.opencontainers.image.vendor"="ML Tooling" \
"org.opencontainers.image.authors"="Lukas Masuch & Benjamin Raethlein" \
"org.opencontainers.image.revision"=$ARG_VCS_REF \
- "org.opencontainers.image.created"=$ARG_BUILD_DATE \
+ "org.opencontainers.image.created"=$ARG_BUILD_DATE \
# Label Schema Convention (deprecated): http://label-schema.org/rc1/
"org.label-schema.name"="Machine Learning Workspace" \
"org.label-schema.description"="All-in-one web-based development environment for machine learning." \
@@ -1115,7 +1117,7 @@ LABEL \
# use global option with tini to kill full process groups: https://github.com/krallin/tini#process-group-killing
ENTRYPOINT ["/tini", "-g", "--"]
-CMD ["python", "/resources/docker-entrypoint.py"]
+CMD ["python", "/resources/docker-entrypoint.py"]
# Port 8080 is the main access port (also includes SSH)
# Port 5091 is the VNC port
@@ -1124,4 +1126,4 @@ CMD ["python", "/resources/docker-entrypoint.py"]
# See supervisor.conf for more ports
EXPOSE 8080
-###
\ No newline at end of file
+###
From da3cc1aaa645a011e07b0ce952fcd08e5e32a7f5 Mon Sep 17 00:00:00 2001
From: Jan Kalkan
Date: Thu, 19 Nov 2020 14:21:14 +0100
Subject: [PATCH 033/293] Update core process tools and interpreters
---
Dockerfile | 18 +++++++++---------
1 file changed, 9 insertions(+), 9 deletions(-)
diff --git a/Dockerfile b/Dockerfile
index 76f571e8..d13aca0b 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -181,7 +181,7 @@ RUN \
clean-layer.sh
# Add tini
-RUN wget --no-verbose https://github.com/krallin/tini/releases/download/v0.18.0/tini -O /tini && \
+RUN wget --no-verbose https://github.com/krallin/tini/releases/download/v0.19.0/tini -O /tini && \
chmod +x /tini
# prepare ssh for inter-container communication for remote python kernel
@@ -209,7 +209,7 @@ RUN \
clean-layer.sh
RUN \
- OPEN_RESTY_VERSION="1.15.8.3" && \
+ OPEN_RESTY_VERSION="1.19.3.1" && \
mkdir $RESOURCES_PATH"/openresty" && \
cd $RESOURCES_PATH"/openresty" && \
apt-get update && \
@@ -246,13 +246,13 @@ COPY resources/nginx/lua-extensions /etc/nginx/nginx_plugins
ENV \
CONDA_DIR=/opt/conda \
- PYTHON_VERSION="3.7.7" \
- CONDA_PYTHON_DIR=/opt/conda/lib/python3.7 \
+ PYTHON_VERSION="3.8.6" \
+ CONDA_PYTHON_DIR=/opt/conda/lib/python3.8 \
MINICONDA_VERSION=4.8.3 \
- MINICONDA_MD5=751786b92c00b1aeae3f017b781018df \
- CONDA_VERSION=4.8.3
+ MINICONDA_MD5=d63adf39f2c220950a063e0529d4ff74 \
+ CONDA_VERSION=4.9.1
-RUN wget --no-verbose https://repo.anaconda.com/miniconda/Miniconda3-py37_${CONDA_VERSION}-Linux-x86_64.sh -O ~/miniconda.sh && \
+RUN wget --no-verbose https://repo.anaconda.com/miniconda/Miniconda3-py38_${CONDA_VERSION}-Linux-x86_64.sh -O ~/miniconda.sh && \
echo "${MINICONDA_MD5} *miniconda.sh" | md5sum -c - && \
/bin/bash ~/miniconda.sh -b -p $CONDA_DIR && \
export PATH=$CONDA_DIR/bin:$PATH && \
@@ -291,8 +291,8 @@ ENV LD_LIBRARY_PATH=$CONDA_DIR/lib
# Install node.js
RUN \
apt-get update && \
- # https://nodejs.org/en/about/releases/ use even numbered releases
- curl -sL https://deb.nodesource.com/setup_12.x | sudo -E bash - && \
+ # https://nodejs.org/en/about/releases/ use even numbered releases, i.e. LTS versions
+ curl -sL https://deb.nodesource.com/setup_14.x | sudo -E bash - && \
apt-get install -y nodejs && \
# As conda is first in path, the commands 'node' and 'npm' reference to the version of conda.
# Replace those versions with the newly installed versions of node
From 4feb93675189e6ff7cb1925fc95568844aad15b0 Mon Sep 17 00:00:00 2001
From: Jan Kalkan
Date: Thu, 19 Nov 2020 14:22:01 +0100
Subject: [PATCH 034/293] Fix typo
---
docs/update-workspace-image.md | 26 ++++++++++++++++++--------
1 file changed, 18 insertions(+), 8 deletions(-)
diff --git a/docs/update-workspace-image.md b/docs/update-workspace-image.md
index e68b3e4f..a3e8033e 100644
--- a/docs/update-workspace-image.md
+++ b/docs/update-workspace-image.md
@@ -3,17 +3,20 @@
We plan to do a full workspace image update (all libraries and tools) about every three month. The full update involves quiet a bit of manual work as documented below:
1. Refactor incubation zone:
+
- Move ubuntu packages to basics or gui-tools section.
- Move python libraries to requirement files in `resources/libraries`.
- Refactor other installs.
-2. Update core (proecss) tools and interpreters:
+2. Update core (process) tools and interpreters:
+
- Tini: [latest release](https://github.com/krallin/tini/releases/latest)
- OpenResty: [latest release](https://openresty.org/en/download.html)
- Miniconda: [latest release](https://repo.continuum.io/miniconda/), [python version](https://anaconda.org/conda-forge/python)
- Node.js: [latest release](https://nodejs.org/en/download/current/)
3. Update core (gui) tools:
+
+   - TigerVNC: [latest release](https://dl.bintray.com/tigervnc/stable/)
- noVNC: [latest release](https://github.com/novnc/noVNC/releases/latest)
- Websockify: [latest release](https://github.com/novnc/websockify/releases/latest)
@@ -22,6 +25,7 @@ We plan to do a full workspace image update (all libraries and tools) about ever
- FileBrowser: [latest release](https://github.com/filebrowser/filebrowser/releases/latest)
4. Update conda packages:
+
- Jupyter Notebook: [latest release](https://anaconda.org/search?q=notebook&sort=ndownloads&sort_order=1&reverse=true)
- JupyterLab: [latest release](https://anaconda.org/search?q=jupyterlab&sort=ndownloads&sort_order=1&reverse=true)
- IPython: [latest release](https://anaconda.org/search?q=ipython&sort=ndownloads&sort_order=1&reverse=true)
@@ -29,6 +33,7 @@ We plan to do a full workspace image update (all libraries and tools) about ever
- PyTorch: [latest release](https://anaconda.org/search?q=pytorch&sort=ndownloads&sort_order=1&reverse=true)
5. Update VS-code extensions:
+
- python: [latest release](https://github.com/microsoft/vscode-python/releases/latest)
- java: [latest release](https://github.com/redhat-developer/vscode-java/releases)
- git-lens: [latest release](https://github.com/eamodio/vscode-gitlens/releases/latest)
@@ -38,6 +43,7 @@ We plan to do a full workspace image update (all libraries and tools) about ever
- remote-ssh: [latest release](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-ssh)
6. Update tool installer scripts:
+
- intellij.sh: [latest release](https://www.jetbrains.com/idea/download/other.html)
- pycharm.sh: [latest release](https://www.jetbrains.com/pycharm/download/other.html)
- nteract.sh: [latest release](https://github.com/nteract/nteract/releases/latest)
@@ -53,12 +59,14 @@ We plan to do a full workspace image update (all libraries and tools) about ever
- guacamole.sh: [latest relesase](https://guacamole.apache.org/releases/)
7. Update `minimmal` and `light` flavor python libraries:
+
- Update requirement files using [piprot](https://github.com/sesh/piprot), [pur](https://github.com/alanhamlett/pip-update-requirements), or [pip-upgrader](https://github.com/simion/pip-upgrader):
- `piprot ./resources/libraries/requirements-minimal.txt`
- `piprot ./resources/libraries/requirements-light.txt`
- [pur](https://github.com/alanhamlett/pip-update-requirements) example: `pur -i -r ./resources/libraries/requirements-minimal.txt`
8. Build and test `minimal` flavor:
+
- Build minimal workspace flavor via `python build.py --flavor=minimal`
- Run workspace container and check startup logs
- Check/Compare layer sizes of new image with previous version (via Portainer)
@@ -68,20 +76,22 @@ We plan to do a full workspace image update (all libraries and tools) about ever
- Jupyter, VNC, JupyterLab, VS-Code, Ungit, Netdata, Glances, Filebrowser, Access Port, SSH Access, Git Integration, Tensorboard
9. Build and test `light` flavor:
- - Build light workspace flavor via `python build.py --flavor=light`
- - Run workspace container and check startup logs
- - Check/Compare layer sizes of new image with previous version (via Portainer)
- - Check folder sizes via `Disk Usage Analyzer` within the Desktop VNC
- - Run `evaluate-python-libraries.ipynb` notebook to update `requirements-full.txt`
+
+ - Build light workspace flavor via `python build.py --flavor=light`
+ - Run workspace container and check startup logs
+ - Check/Compare layer sizes of new image with previous version (via Portainer)
+ - Check folder sizes via `Disk Usage Analyzer` within the Desktop VNC
+ - Run `evaluate-python-libraries.ipynb` notebook to update `requirements-full.txt`
10. Build and test `full` flavor:
+
- Build main workspace flavor via `python build.py --flavor=full`
- Deploy new workspace image and check startup logs
- Check/Compare layer sizes of new image with previous version (via Portainer)
- Check Image Labels (via Portainer)
- Check folder sizes via `Disk Usage Analyzer` within the Desktop VNC
- Check all webtools/features (just open and see of running):
- - Jupyter (+ Extensions), JupyterLab (+ Extensions), VNC, VS-Code (+ Extensions), Ungit, Netdata, Glances, Filebrowser, Access Port, SSH Access, Git Integration, Tensorboard
+ - Jupyter (+ Extensions), JupyterLab (+ Extensions), VNC, VS-Code (+ Extensions), Ungit, Netdata, Glances, Filebrowser, Access Port, SSH Access, Git Integration, Tensorboard
- Run from inside workspace: `/bin/bash /resources/tests/log-environment-info.sh`
- Run from inside workspace: `tutorials/workspace-test-utilities.ipynb`
- Check all gui-tools in VNC Desktop (just open and see of running)
@@ -96,4 +106,4 @@ We plan to do a full workspace image update (all libraries and tools) about ever
11. Build and test `gpu` flavor via `python build.py --flavor=gpu`
12. Build and test `R` flavor via `python build.py --flavor=R`
13. Build and test `spark` flavor via `python build.py --flavor=spark`
-14. Build and push all flavors via `python build.py --deploy --version= --flavor=all`
\ No newline at end of file
+14. Build and push all flavors via `python build.py --deploy --version= --flavor=all`
From c0efadf3a029a1e0deee1510743155aa7128f4c4 Mon Sep 17 00:00:00 2001
From: Benjamin Raethlein
Date: Thu, 19 Nov 2020 16:20:12 +0100
Subject: [PATCH 035/293] Update vscode eslint plugin to fix build issue
---
Dockerfile | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Dockerfile b/Dockerfile
index 24ef548c..b182559b 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -842,7 +842,7 @@ RUN \
sleep $SLEEP_TIMER && \
# Install ESLint extension: https://marketplace.visualstudio.com/items?itemName=dbaeumer.vscode-eslint
# Older versions do not support vscode 1.39 - https://github.com/microsoft/vscode-eslint/
- VS_ESLINT_VERSION="2.1.8" && \
+ VS_ESLINT_VERSION="2.1.13" && \
wget --no-verbose https://marketplace.visualstudio.com/_apis/public/gallery/publishers/dbaeumer/vsextensions/vscode-eslint/$VS_ESLINT_VERSION/vspackage -O dbaeumer.vscode-eslint.vsix && \
bsdtar -xf dbaeumer.vscode-eslint.vsix extension && \
rm dbaeumer.vscode-eslint.vsix && \
From 062db38ff9b4e723176e6a844465f254df0d6aac Mon Sep 17 00:00:00 2001
From: Jan Kalkan
Date: Thu, 19 Nov 2020 18:33:03 +0100
Subject: [PATCH 036/293] Update core (gui) tools
---
Dockerfile | 11 +++++++----
resources/tools/filebrowser.sh | 4 ++--
resources/tools/ungit.sh | 2 +-
resources/tools/vs-code-server.sh | 6 ++++--
4 files changed, 14 insertions(+), 9 deletions(-)
diff --git a/Dockerfile b/Dockerfile
index d13aca0b..9f8894a3 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -379,14 +379,17 @@ RUN \
# apt-get install -y python-numpy && \
cd ${RESOURCES_PATH} && \
# Tiger VNC
- wget -qO- https://dl.bintray.com/tigervnc/stable/tigervnc-1.10.1.x86_64.tar.gz | tar xz --strip 1 -C / && \
+ wget -qO- https://dl.bintray.com/tigervnc/stable/tigervnc-1.11.0.x86_64.tar.gz | tar xz --strip 1 -C / && \
# Install websockify
mkdir -p ./novnc/utils/websockify && \
# Before updating the noVNC version, we need to make sure that our monkey patching scripts still work!!
- wget -qO- https://github.com/novnc/noVNC/archive/v1.1.0.tar.gz | tar xz --strip 1 -C ./novnc && \
+ # Todo: Check monkey patching script
+ wget -qO- https://github.com/novnc/noVNC/archive/v1.2.0.tar.gz | tar xz --strip 1 -C ./novnc && \
# use older version of websockify to prevent hanging connections on offline containers?, see https://github.com/ConSol/docker-headless-vnc-container/issues/50
- wget -qO- https://github.com/novnc/websockify/archive/v0.9.0.tar.gz | tar xz --strip 1 -C ./novnc/utils/websockify && \
- chmod +x -v ./novnc/utils/*.sh && \
+ # Todo: Check and remove previous comment since issue is closed and websockify seems already be updated to latest version?
+ # Todo: Check if still works after removal
+ # wget -qO- https://github.com/novnc/websockify/archive/v0.9.0.tar.gz | tar xz --strip 1 -C ./novnc/utils/websockify && \
+ # chmod +x -v ./novnc/utils/*.sh && \
# create user vnc directory
mkdir -p $HOME/.vnc && \
# Fix permissions
diff --git a/resources/tools/filebrowser.sh b/resources/tools/filebrowser.sh
index 36be160d..c32f7640 100644
--- a/resources/tools/filebrowser.sh
+++ b/resources/tools/filebrowser.sh
@@ -18,8 +18,8 @@ if [ ! -f "/usr/local/bin/filebrowser" ]; then
echo "Installing Filebrowser. Please wait..."
mkdir -p $RESOURCES_PATH/filebrowser
cd $RESOURCES_PATH/filebrowser
- # TODO: the 2.1.0 version does not work currently
- wget -q https://github.com/filebrowser/filebrowser/releases/download/v2.5.0/linux-amd64-filebrowser.tar.gz -O ./filebrowser.tar.gz
+    wget -q https://github.com/filebrowser/filebrowser/releases/download/v2.9.0/linux-amd64-filebrowser.tar.gz \
+        -O ./filebrowser.tar.gz
tar -xzf ./filebrowser.tar.gz
chmod +x "./filebrowser"
mv "./filebrowser" "/usr/local/bin/filebrowser"
diff --git a/resources/tools/ungit.sh b/resources/tools/ungit.sh
index 1c9b7026..d08e2a60 100644
--- a/resources/tools/ungit.sh
+++ b/resources/tools/ungit.sh
@@ -17,7 +17,7 @@ done
if ! hash ungit 2>/dev/null; then
echo "Installing Ungit. Please wait..."
npm update
- npm install -g ungit@1.5.9
+ npm install -g ungit@1.5.13
else
echo "Ungit is already installed"
fi
diff --git a/resources/tools/vs-code-server.sh b/resources/tools/vs-code-server.sh
index 7ffc9428..e0ed6b49 100644
--- a/resources/tools/vs-code-server.sh
+++ b/resources/tools/vs-code-server.sh
@@ -21,8 +21,10 @@ if [ ! -f "/usr/local/bin/code-server" ]; then
# VS_CODE_VERSION=$CODE_SERVER_VERSION-vsc1.41.1
# wget -q https://github.com/cdr/code-server/releases/download/$CODE_SERVER_VERSION/code-server$VS_CODE_VERSION-linux-x86_64.tar.gz -O ./vscode-web.tar.gz
# Use older version, since newer has some problems with python extension
- VS_CODE_VERSION=3.4.1
- wget -q https://github.com/cdr/code-server/releases/download/$VS_CODE_VERSION/code-server_${VS_CODE_VERSION}_amd64.deb -O ./code-server.deb
+ # Todo: Check if update from 3.4.1 causes problems
+ # Todo: Remove comment if no problem
+ VS_CODE_VERSION=3.7.1
+ wget -q https://github.com/cdr/code-server/releases/download/$VS_CODE_VERSION/code-server_${VS_CODE_VERSION}_amd64.deb -O ./code-server.deb
apt-get update
apt-get install -y ./code-server.deb
rm ./code-server.deb
From b207ee300d0b704f3c9c05e85f692b3dde4182a9 Mon Sep 17 00:00:00 2001
From: Benjamin Raethlein
Date: Thu, 19 Nov 2020 23:01:06 +0100
Subject: [PATCH 037/293] Use more stable install of code-server extensions
---
Dockerfile | 122 ++++++++++++++++++++++++++++-------------------------
1 file changed, 64 insertions(+), 58 deletions(-)
diff --git a/Dockerfile b/Dockerfile
index b182559b..7e268edb 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -63,7 +63,7 @@ RUN \
apt-get upgrade -y && \
apt-get update && \
apt-get install -y --no-install-recommends \
- # This is necessary for apt to access HTTPS sources:
+ # This is necessary for apt to access HTTPS sources:
apt-transport-https \
gnupg-agent \
gpg-agent \
@@ -286,7 +286,7 @@ RUN wget --no-verbose https://repo.anaconda.com/miniconda/Miniconda3-py37_${COND
ENV PATH=$CONDA_DIR/bin:$PATH
# There is nothing added yet to LD_LIBRARY_PATH, so we can overwrite
-ENV LD_LIBRARY_PATH=$CONDA_DIR/lib
+ENV LD_LIBRARY_PATH=$CONDA_DIR/lib
# Install node.js
RUN \
@@ -294,7 +294,7 @@ RUN \
# https://nodejs.org/en/about/releases/ use even numbered releases
curl -sL https://deb.nodesource.com/setup_12.x | sudo -E bash - && \
apt-get install -y nodejs && \
- # As conda is first in path, the commands 'node' and 'npm' reference to the version of conda.
+ # As conda is first in path, the commands 'node' and 'npm' reference to the version of conda.
# Replace those versions with the newly installed versions of node
rm -f /opt/conda/bin/node && ln -s /usr/bin/node /opt/conda/bin/node && \
rm -f /opt/conda/bin/npm && ln -s /usr/bin/npm /opt/conda/bin/npm && \
@@ -310,7 +310,7 @@ RUN \
echo "deb https://dl.yarnpkg.com/debian/ stable main" | sudo tee /etc/apt/sources.list.d/yarn.list && \
apt-get update && \
apt-get install -y --no-install-recommends yarn && \
- # Install typescript
+ # Install typescript
/usr/bin/npm install -g typescript && \
# Install webpack - 32 MB
/usr/bin/npm install -g webpack && \
@@ -328,7 +328,7 @@ RUN \
# Cleanup
clean-layer.sh
-ENV JAVA_HOME="/usr/lib/jvm/java-11-openjdk-amd64"
+ENV JAVA_HOME="/usr/lib/jvm/java-11-openjdk-amd64"
# TODO add MAVEN_HOME?
### END RUNTIMES ###
@@ -409,7 +409,7 @@ RUN \
apt-get install -y --no-install-recommends gdebi && \
# Search for files
apt-get install -y --no-install-recommends catfish && \
- # TODO: Unable to locate package: apt-get install -y --no-install-recommends gnome-search-tool &&
+ # TODO: Unable to locate package: apt-get install -y --no-install-recommends gnome-search-tool &&
apt-get install -y --no-install-recommends font-manager && \
# vs support for thunar
apt-get install -y thunar-vcs-plugin && \
@@ -444,13 +444,13 @@ RUN \
ln -s /usr/bin/chromium-browser /usr/bin/google-chrome && \
# Cleanup
# Large package: gnome-user-guide 50MB app-install-data 50MB
- apt-get remove -y app-install-data gnome-user-guide && \
+ apt-get remove -y app-install-data gnome-user-guide && \
clean-layer.sh
# Add the defaults from /lib/x86_64-linux-gnu, otherwise lots of no version errors
# cannot be added above otherwise there are errors in the installation of the gui tools
# Call order: https://unix.stackexchange.com/questions/367600/what-is-the-order-that-linuxs-dynamic-linker-searches-paths-in
-ENV LD_LIBRARY_PATH=/lib/x86_64-linux-gnu:/usr/lib/x86_64-linux-gnu:$CONDA_DIR/lib
+ENV LD_LIBRARY_PATH=/lib/x86_64-linux-gnu:/usr/lib/x86_64-linux-gnu:$CONDA_DIR/lib
# Install Web Tools - Offered via Jupyter Tooling Plugin
@@ -520,8 +520,8 @@ RUN \
COPY resources/libraries ${RESOURCES_PATH}/libraries
### Install main data science libs
-RUN \
- # Link Conda - All python are linke to the conda instances
+RUN \
+ # Link Conda - All python are linke to the conda instances
# Linking python 3 crashes conda -> cannot install anyting - remove instead
#ln -s -f $CONDA_DIR/bin/python /usr/bin/python3 && \
# if removed -> cannot use add-apt-repository
@@ -531,7 +531,7 @@ RUN \
apt-get update && \
# upgrade pip
pip install --upgrade pip && \
- # If minimal flavor - install
+ # If minimal flavor - install
if [ "$WORKSPACE_FLAVOR" = "minimal" ]; then \
# Install nomkl - mkl needs lots of space
conda install -y --update-all nomkl ; \
@@ -620,7 +620,7 @@ RUN \
apt-get install -y libtesseract-dev && \
# Install libjpeg turbo for speedup in image processing
conda install -y libjpeg-turbo && \
- # Faiss - A library for efficient similarity search and clustering of dense vectors.
+ # Faiss - A library for efficient similarity search and clustering of dense vectors.
conda install -y -c pytorch faiss-cpu && \
# Install full pip requirements
pip install --no-cache-dir --upgrade -r ${RESOURCES_PATH}/libraries/requirements-full.txt && \
@@ -634,7 +634,7 @@ RUN \
# Fix conda version
RUN \
- # Conda installs wrong node version - relink conda node to the actual node
+ # Conda installs wrong node version - relink conda node to the actual node
rm -f /opt/conda/bin/node && ln -s /usr/bin/node /opt/conda/bin/node && \
rm -f /opt/conda/bin/npm && ln -s /usr/bin/npm /opt/conda/bin/npm
@@ -693,7 +693,7 @@ RUN \
jupyter nbextension enable --py --sys-prefix qgrid && \
# Activate Colab support
jupyter serverextension enable --py jupyter_http_over_ws && \
- # Activate Voila Rendering
+ # Activate Voila Rendering
# currently not working jupyter serverextension enable voila --sys-prefix && \
# Enable ipclusters
ipcluster nbextension enable && \
@@ -725,7 +725,7 @@ RUN \
$lab_ext_install jupyterlab_tensorboard && \
# install jupyterlab git
$lab_ext_install @jupyterlab/git && \
- pip install jupyterlab-git && \
+ pip install jupyterlab-git && \
jupyter serverextension enable --py jupyterlab_git && \
# For Matplotlib: https://github.com/matplotlib/jupyter-matplotlib
$lab_ext_install jupyter-matplotlib && \
@@ -808,60 +808,66 @@ RUN \
cd $RESOURCES_PATH && \
mkdir -p $HOME/.vscode/extensions/ && \
# Install python extension - (newer versions are 30MB bigger)
- VS_PYTHON_VERSION="2020.7.96456" && \
- wget --no-verbose https://github.com/microsoft/vscode-python/releases/download/$VS_PYTHON_VERSION/ms-python-release.vsix && \
- bsdtar -xf ms-python-release.vsix extension && \
- rm ms-python-release.vsix && \
- mv extension $HOME/.vscode/extensions/ms-python.python-$VS_PYTHON_VERSION && \
- sleep $SLEEP_TIMER && \
+ VS_PYTHON_VERSION="2020.7.96456" \
+ # wget --no-verbose https://github.com/microsoft/vscode-python/releases/download/$VS_PYTHON_VERSION/ms-python-release.vsix && \
+ # bsdtar -xf ms-python-release.vsix extension && \
+ # rm ms-python-release.vsix && \
+ # mv extension $HOME/.vscode/extensions/ms-python.python-$VS_PYTHON_VERSION && \
+ && code-server --install-extension ms-python.python@$VS_PYTHON_VERSION \
+ && sleep $SLEEP_TIMER && \
# Install vscode-java: https://github.com/redhat-developer/vscode-java/releases
# higher versions do not support vs code 1.39
- VS_JAVA_VERSION="0.65.0" && \
+ VS_JAVA_VERSION="0.61.0" \
# wget --quiet --no-check-certificate https://github.com/redhat-developer/vscode-java/releases/download/v$VS_JAVA_VERSION/redhat.java-$VS_JAVA_VERSION.vsix && \
- wget --no-verbose -O redhat.java-$VS_JAVA_VERSION.vsix https://marketplace.visualstudio.com/_apis/public/gallery/publishers/redhat/vsextensions/java/$VS_JAVA_VERSION/vspackage && \
- bsdtar -xf redhat.java-$VS_JAVA_VERSION.vsix extension && \
- rm redhat.java-$VS_JAVA_VERSION.vsix && \
- mv extension $HOME/.vscode/extensions/redhat.java-$VS_JAVA_VERSION && \
+ # wget --no-verbose -O redhat.java-$VS_JAVA_VERSION.vsix https://marketplace.visualstudio.com/_apis/public/gallery/publishers/redhat/vsextensions/java/$VS_JAVA_VERSION/vspackage && \
+ # bsdtar -xf redhat.java-$VS_JAVA_VERSION.vsix extension && \
+ # rm redhat.java-$VS_JAVA_VERSION.vsix && \
+ # mv extension $HOME/.vscode/extensions/redhat.java-$VS_JAVA_VERSION && \
+ && code-server --install-extension redhat.java@$VS_JAVA_VERSION \
# If light flavor -> exit here
- if [ "$WORKSPACE_FLAVOR" = "light" ]; then \
+ && if [ "$WORKSPACE_FLAVOR" = "light" ]; then \
exit 0 ; \
fi && \
sleep $SLEEP_TIMER && \
# Install git lens: https://github.com/eamodio/vscode-gitlens
- VS_GITLENS_VERSION="10.2.2" && \
- wget --no-verbose https://github.com/eamodio/vscode-gitlens/releases/download/v$VS_GITLENS_VERSION/gitlens-$VS_GITLENS_VERSION.vsix && \
- bsdtar -xf gitlens-$VS_GITLENS_VERSION.vsix extension && \
- rm gitlens-$VS_GITLENS_VERSION.vsix && \
- mv extension $HOME/.vscode/extensions/eamodio.gitlens-$VS_GITLENS_VERSION && \
+ VS_GITLENS_VERSION="10.2.2" \
+ # wget --no-verbose https://github.com/eamodio/vscode-gitlens/releases/download/v$VS_GITLENS_VERSION/gitlens-$VS_GITLENS_VERSION.vsix && \
+ # bsdtar -xf gitlens-$VS_GITLENS_VERSION.vsix extension && \
+ # rm gitlens-$VS_GITLENS_VERSION.vsix && \
+ # mv extension $HOME/.vscode/extensions/eamodio.gitlens-$VS_GITLENS_VERSION && \
+ && code-server --install-extension eamodio.gitlens@$VS_GITLENS_VERSION \
# Install code runner: https://github.com/formulahendry/vscode-code-runner/releases/latest
- VS_CODE_RUNNER_VERSION="0.9.17" && \
- wget --no-verbose https://github.com/formulahendry/vscode-code-runner/releases/download/$VS_CODE_RUNNER_VERSION/code-runner-$VS_CODE_RUNNER_VERSION.vsix && \
- bsdtar -xf code-runner-$VS_CODE_RUNNER_VERSION.vsix extension && \
- rm code-runner-$VS_CODE_RUNNER_VERSION.vsix && \
- mv extension $HOME/.vscode/extensions/code-runner-$VS_CODE_RUNNER_VERSION && \
- sleep $SLEEP_TIMER && \
+ && VS_CODE_RUNNER_VERSION="0.9.17" \
+ # wget --no-verbose https://github.com/formulahendry/vscode-code-runner/releases/download/$VS_CODE_RUNNER_VERSION/code-runner-$VS_CODE_RUNNER_VERSION.vsix && \
+ # bsdtar -xf code-runner-$VS_CODE_RUNNER_VERSION.vsix extension && \
+ # rm code-runner-$VS_CODE_RUNNER_VERSION.vsix && \
+ # mv extension $HOME/.vscode/extensions/code-runner-$VS_CODE_RUNNER_VERSION && \
+ && code-server --install-extension formulahendry.code-runner@$VS_CODE_RUNNER_VERSION \
+ && sleep $SLEEP_TIMER && \
# Install ESLint extension: https://marketplace.visualstudio.com/items?itemName=dbaeumer.vscode-eslint
# Older versions do not support vscode 1.39 - https://github.com/microsoft/vscode-eslint/
- VS_ESLINT_VERSION="2.1.13" && \
- wget --no-verbose https://marketplace.visualstudio.com/_apis/public/gallery/publishers/dbaeumer/vsextensions/vscode-eslint/$VS_ESLINT_VERSION/vspackage -O dbaeumer.vscode-eslint.vsix && \
- bsdtar -xf dbaeumer.vscode-eslint.vsix extension && \
- rm dbaeumer.vscode-eslint.vsix && \
- mv extension $HOME/.vscode/extensions/dbaeumer.vscode-eslint-$VS_ESLINT_VERSION.vsix && \
- sleep $SLEEP_TIMER && \
+ VS_ESLINT_VERSION="2.1.10" \
+ # wget --no-verbose https://marketplace.visualstudio.com/_apis/public/gallery/publishers/dbaeumer/vsextensions/vscode-eslint/$VS_ESLINT_VERSION/vspackage -O dbaeumer.vscode-eslint.vsix && \
+ # bsdtar -xf dbaeumer.vscode-eslint.vsix extension && \
+ # rm dbaeumer.vscode-eslint.vsix && \
+ # mv extension $HOME/.vscode/extensions/dbaeumer.vscode-eslint-$VS_ESLINT_VERSION.vsix && \
+ && code-server --install-extension dbaeumer.vscode-eslint@$VS_ESLINT_VERSION \
+ && sleep $SLEEP_TIMER && \
# Install Markdown lint extension: https://marketplace.visualstudio.com/items?itemName=DavidAnson.vscode-markdownlint
- VS_MARKDOWN_LINT_VERSION="0.37.2" && \
- wget --no-verbose https://marketplace.visualstudio.com/_apis/public/gallery/publishers/DavidAnson/vsextensions/vscode-markdownlint/$VS_MARKDOWN_LINT_VERSION/vspackage -O davidanson.vscode-markdownlint.vsix && \
- bsdtar -xf davidanson.vscode-markdownlint.vsix extension && \
- rm davidanson.vscode-markdownlint.vsix && \
- mv extension $HOME/.vscode/extensions/davidanson.vscode-markdownlint-$VS_MARKDOWN_LINT_VERSION.vsix && \
+ VS_MARKDOWN_LINT_VERSION="0.37.2" \
+ # wget --no-verbose https://marketplace.visualstudio.com/_apis/public/gallery/publishers/DavidAnson/vsextensions/vscode-markdownlint/$VS_MARKDOWN_LINT_VERSION/vspackage -O davidanson.vscode-markdownlint.vsix && \
+ # bsdtar -xf davidanson.vscode-markdownlint.vsix extension && \
+ # rm davidanson.vscode-markdownlint.vsix && \
+ # mv extension $HOME/.vscode/extensions/davidanson.vscode-markdownlint-$VS_MARKDOWN_LINT_VERSION.vsix && \
+ && code-server --install-extension davidanson.vscode-markdownlint@$VS_MARKDOWN_LINT_VERSION \
# Fix permissions
- fix-permissions.sh $HOME/.vscode/extensions/ && \
+ && fix-permissions.sh $HOME/.vscode/extensions/ && \
# Cleanup
clean-layer.sh
### END VSCODE ###
-### INCUBATION ZONE ###
+### INCUBATION ZONE ###
RUN \
apt-get update && \
@@ -876,7 +882,7 @@ RUN \
# apt-get install -y libasound2-dev libjack-dev && \
# libproj-dev required for cartopy (15MB)
# apt-get install -y libproj-dev && \
- # mysql server: 150MB
+ # mysql server: 150MB
# apt-get install -y mysql-server && \
# If minimal or light flavor -> exit here
if [ "$WORKSPACE_FLAVOR" = "minimal" ] || [ "$WORKSPACE_FLAVOR" = "light" ]; then \
@@ -1021,7 +1027,7 @@ RUN \
# MKL and Hardware Optimization
# Fix problem with MKL with duplicated libiomp5: https://github.com/dmlc/xgboost/issues/1715
-# Alternative - use openblas instead of Intel MKL: conda install -y nomkl
+# Alternative - use openblas instead of Intel MKL: conda install -y nomkl
# http://markus-beuckelmann.de/blog/boosting-numpy-blas.html
# MKL:
# https://software.intel.com/en-us/articles/tips-to-improve-performance-for-popular-deep-learning-frameworks-on-multi-core-cpus
@@ -1054,7 +1060,7 @@ ENV CONFIG_BACKUP_ENABLED="true" \
WORKSPACE_PORT="8080" \
# Set zsh as default shell (e.g. in jupyter)
SHELL="/usr/bin/zsh" \
- # Fix dark blue color for ls command (unreadable):
+ # Fix dark blue color for ls command (unreadable):
# https://askubuntu.com/questions/466198/how-do-i-change-the-color-for-directories-with-ls-in-the-console
# USE default LS_COLORS - Dont set LS COLORS - overwritten in zshrc
# LS_COLORS="" \
@@ -1062,7 +1068,7 @@ ENV CONFIG_BACKUP_ENABLED="true" \
# this can be problematic since docker restricts CPUs by stil showing all
MAX_NUM_THREADS="auto"
-### END CONFIGURATION ###
+### END CONFIGURATION ###
ARG ARG_BUILD_DATE="unknown"
ARG ARG_VCS_REF="unknown"
ARG ARG_WORKSPACE_VERSION="unknown"
@@ -1092,7 +1098,7 @@ LABEL \
"org.opencontainers.image.vendor"="ML Tooling" \
"org.opencontainers.image.authors"="Lukas Masuch & Benjamin Raethlein" \
"org.opencontainers.image.revision"=$ARG_VCS_REF \
- "org.opencontainers.image.created"=$ARG_BUILD_DATE \
+ "org.opencontainers.image.created"=$ARG_BUILD_DATE \
# Label Schema Convention (deprecated): http://label-schema.org/rc1/
"org.label-schema.name"="Machine Learning Workspace" \
"org.label-schema.description"="All-in-one web-based development environment for machine learning." \
@@ -1115,7 +1121,7 @@ LABEL \
# use global option with tini to kill full process groups: https://github.com/krallin/tini#process-group-killing
ENTRYPOINT ["/tini", "-g", "--"]
-CMD ["python", "/resources/docker-entrypoint.py"]
+CMD ["python", "/resources/docker-entrypoint.py"]
# Port 8080 is the main access port (also includes SSH)
# Port 5091 is the VNC port
@@ -1124,4 +1130,4 @@ CMD ["python", "/resources/docker-entrypoint.py"]
# See supervisor.conf for more ports
EXPOSE 8080
-###
\ No newline at end of file
+###
From c2ff1634f9aa788f3a716dae31e544684313d7d5 Mon Sep 17 00:00:00 2001
From: Jan Kalkan
Date: Fri, 20 Nov 2020 10:32:37 +0100
Subject: [PATCH 038/293] Update conda packages
---
Dockerfile | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/Dockerfile b/Dockerfile
index 9f8894a3..712f6469 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -568,9 +568,9 @@ RUN \
cmake \
joblib \
Pillow \
- 'ipython=7.16.*' \
- 'notebook=6.0.*' \
- 'jupyterlab=2.1.*' \
+ 'ipython=7.19.*' \
+ 'notebook=6.1.*' \
+ 'jupyterlab=2.2.*' \
# Selected by library evaluation
networkx \
click \
@@ -602,9 +602,9 @@ RUN \
# Install numba
conda install -y numba && \
# Install tensorflow - cpu only - mkl support
- conda install -y 'tensorflow=2.0.*' && \
+ conda install -y 'tensorflow=2.3.*' && \
# Install pytorch - cpu only
- conda install -y -c pytorch "pytorch==1.4.*" torchvision cpuonly && \
+ conda install -y -c pytorch "pytorch==1.7.*" torchvision cpuonly && \
# Install light pip requirements
pip install --no-cache-dir --upgrade -r ${RESOURCES_PATH}/libraries/requirements-light.txt && \
# If light light flavor - exit here
From 082b70e74548639fe4f66c64c30077eebd9d1249 Mon Sep 17 00:00:00 2001
From: Jan Kalkan
Date: Fri, 20 Nov 2020 11:07:39 +0100
Subject: [PATCH 039/293] Update VS-code extensions
---
Dockerfile | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/Dockerfile b/Dockerfile
index 712f6469..f69224c5 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -820,7 +820,8 @@ RUN \
sleep $SLEEP_TIMER && \
# Install vscode-java: https://github.com/redhat-developer/vscode-java/releases
# higher versions do not support vs code 1.39
- VS_JAVA_VERSION="0.65.0" && \
+ # Todo: Check if comment is obsolete and can be updated / removed
+ VS_JAVA_VERSION="0.61.0" && \
# wget --quiet --no-check-certificate https://github.com/redhat-developer/vscode-java/releases/download/v$VS_JAVA_VERSION/redhat.java-$VS_JAVA_VERSION.vsix && \
wget --no-verbose -O redhat.java-$VS_JAVA_VERSION.vsix https://marketplace.visualstudio.com/_apis/public/gallery/publishers/redhat/vsextensions/java/$VS_JAVA_VERSION/vspackage && \
bsdtar -xf redhat.java-$VS_JAVA_VERSION.vsix extension && \
@@ -846,7 +847,7 @@ RUN \
sleep $SLEEP_TIMER && \
# Install ESLint extension: https://marketplace.visualstudio.com/items?itemName=dbaeumer.vscode-eslint
# Older versions do not support vscode 1.39 - https://github.com/microsoft/vscode-eslint/
- VS_ESLINT_VERSION="2.1.8" && \
+ VS_ESLINT_VERSION="2.1.13" && \
wget --no-verbose https://marketplace.visualstudio.com/_apis/public/gallery/publishers/dbaeumer/vsextensions/vscode-eslint/$VS_ESLINT_VERSION/vspackage -O dbaeumer.vscode-eslint.vsix && \
bsdtar -xf dbaeumer.vscode-eslint.vsix extension && \
rm dbaeumer.vscode-eslint.vsix && \
From 2c9448d9069b1c90a9b7d3fa69f0838c06a4bd48 Mon Sep 17 00:00:00 2001
From: Jan Kalkan
Date: Fri, 20 Nov 2020 11:44:50 +0100
Subject: [PATCH 040/293] Update tool installer scripts
---
docs/update-workspace-image.md | 3 +--
resources/tools/fasttext.sh | 8 ++++----
resources/tools/intellij.sh | 2 +-
resources/tools/metabase.sh | 2 +-
resources/tools/nteract.sh | 2 +-
resources/tools/pillow-simd.sh | 4 ++--
resources/tools/pycharm.sh | 6 +++---
resources/tools/r-studio-desktop.sh | 6 +++---
resources/tools/r-studio-server.sh | 2 +-
resources/tools/robo3t.sh | 6 +++---
resources/tools/sqlectron.sh | 4 ++--
11 files changed, 22 insertions(+), 23 deletions(-)
diff --git a/docs/update-workspace-image.md b/docs/update-workspace-image.md
index a3e8033e..49f0aab4 100644
--- a/docs/update-workspace-image.md
+++ b/docs/update-workspace-image.md
@@ -55,8 +55,7 @@ We plan to do a full workspace image update (all libraries and tools) about ever
- robo3t.sh: [latest release](https://github.com/Studio3T/robomongo/releases/latest)
- metabase.sh: [latest release](https://github.com/metabase/metabase/releases/latest)
- fasttext.sh: [latest release](https://github.com/facebookresearch/fastText/releases/latest)
- - kubernetes-client.sh: [kube-prompt release](https://github.com/c-bata/kube-prompt/releases/latest)
- - guacamole.sh: [latest relesase](https://guacamole.apache.org/releases/)
+ - kubernetes-utils.sh: [kube-prompt release](https://github.com/c-bata/kube-prompt/releases/latest)
7. Update `minimmal` and `light` flavor python libraries:
diff --git a/resources/tools/fasttext.sh b/resources/tools/fasttext.sh
index 6d4edbda..ebab3c78 100644
--- a/resources/tools/fasttext.sh
+++ b/resources/tools/fasttext.sh
@@ -16,10 +16,10 @@ if ! hash fasttext 2>/dev/null; then
echo "Installing Fasttext. Please wait..."
mkdir $RESOURCES_PATH"/fasttext"
cd $RESOURCES_PATH"/fasttext"
- wget https://github.com/facebookresearch/fastText/archive/v0.9.1.zip
- unzip -q v0.9.1.zip
- rm v0.9.1.zip
- cd fastText-0.9.1
+ wget https://github.com/facebookresearch/fastText/archive/v0.9.2.zip
+ unzip -q v0.9.2.zip
+ rm v0.9.2.zip
+ cd fastText-0.9.2
# Surpress output - if there is a problem remove to see logs > /dev/null
make > /dev/null
chmod -R a+rwx $RESOURCES_PATH"/fasttext"
diff --git a/resources/tools/intellij.sh b/resources/tools/intellij.sh
index b40c713e..2993aef3 100644
--- a/resources/tools/intellij.sh
+++ b/resources/tools/intellij.sh
@@ -15,7 +15,7 @@ done
if ! hash intellij-community 2>/dev/null; then
echo "Installing IntelliJ Community. Please wait..."
cd $RESOURCES_PATH
- wget https://download-cf.jetbrains.com/idea/ideaIC-2020.1.4.tar.gz -O ./ideaIC.tar.gz
+ wget https://download-cf.jetbrains.com/idea/ideaIC-2020.2.3.tar.gz -O ./ideaIC.tar.gz
tar xfz ideaIC.tar.gz
mv idea-* /opt/idea
rm ./ideaIC.tar.gz
diff --git a/resources/tools/metabase.sh b/resources/tools/metabase.sh
index 78adda1e..e828c995 100644
--- a/resources/tools/metabase.sh
+++ b/resources/tools/metabase.sh
@@ -17,7 +17,7 @@ done
if [ ! -f "$RESOURCES_PATH/metabase.jar" ]; then
cd $RESOURCES_PATH
echo "Installing Metabase. Please wait..."
- wget http://downloads.metabase.com/v0.34.1/metabase.jar
+ wget https://downloads.metabase.com/v0.37.2/metabase.jar
else
echo "Metabase is already installed"
fi
diff --git a/resources/tools/nteract.sh b/resources/tools/nteract.sh
index 83fd398f..4a4598c7 100644
--- a/resources/tools/nteract.sh
+++ b/resources/tools/nteract.sh
@@ -15,7 +15,7 @@ done
if ! hash nteract 2>/dev/null; then
echo "Installing Nteract. Please wait..."
cd $RESOURCES_PATH
- wget https://github.com/nteract/nteract/releases/download/v0.24.0/nteract_0.24.0_amd64.deb -O ./nteract.deb
+ wget https://github.com/nteract/nteract/releases/download/v0.26.0/nteract_0.26.0_amd64.deb -O ./nteract.deb
apt-get update
apt-get install -y ./nteract.deb
rm ./nteract.deb
diff --git a/resources/tools/pillow-simd.sh b/resources/tools/pillow-simd.sh
index 1db1e793..86078368 100644
--- a/resources/tools/pillow-simd.sh
+++ b/resources/tools/pillow-simd.sh
@@ -13,11 +13,11 @@ echo "Installing Pillow SIMD. Please wait..."
conda uninstall -y --force pillow pil jpeg libtiff libjpeg-turbo
pip uninstall -y pillow pil jpeg libtiff libjpeg-turbo
conda install -y --no-deps -c conda-forge libjpeg-turbo
-CFLAGS="${CFLAGS} -mavx2" pip install --upgrade --no-cache-dir --force-reinstall --no-binary :all: --compile pillow-simd==7.0.0.post2
+CFLAGS="${CFLAGS} -mavx2" pip install --upgrade --no-cache-dir --force-reinstall --no-binary :all: --compile pillow-simd==7.0.0.post3
conda install -y --no-deps jpeg libtiff
echo "This should return a version with post prefix if pillow-simd is used:"
python -c "from PIL import Image; print(Image.__version__)"
echo "This should return True of libjpeg-turbo is enabled:"
python -c "from PIL import features; print(features.check_feature('libjpeg_turbo'))"
-sleep 15
\ No newline at end of file
+sleep 15
diff --git a/resources/tools/pycharm.sh b/resources/tools/pycharm.sh
index c6492a71..e1036f2e 100644
--- a/resources/tools/pycharm.sh
+++ b/resources/tools/pycharm.sh
@@ -15,11 +15,11 @@ done
if ! hash pycharm-community 2>/dev/null; then
echo "Installing PyCharm Community. Please wait..."
cd /resources
- wget https://download-cf.jetbrains.com/python/pycharm-community-2020.1.4.tar.gz -O ./pycharm.tar.gz
+ wget https://download-cf.jetbrains.com/python/pycharm-community-2020.2.3.tar.gz -O ./pycharm.tar.gz
tar xfz ./pycharm.tar.gz
mv pycharm-* /opt/pycharm
rm ./pycharm.tar.gz
- ln -s /opt/pycharm/bin/pycharm.sh /usr/bin/pycharm-community
+ ln -s /opt/pycharm/bin/pycharm.sh /usr/bin/pycharm-community
printf "[Desktop Entry]\nEncoding=UTF-8\nName=PyCharm Community\nComment=Python IDE\nExec=pycharm-community\nIcon=/opt/pycharm/bin/pycharm.png\nTerminal=false\nStartupNotify=true\nType=Application\nCategories=Development;IDE;" > /usr/share/applications/pycharm.desktop
else
echo "PyCharm is already installed"
@@ -31,4 +31,4 @@ if [ $INSTALL_ONLY = 0 ] ; then
echo "PyCharm is a GUI application. Make sure to run this script only within the VNC Desktop."
pycharm-community
sleep 10
-fi
\ No newline at end of file
+fi
diff --git a/resources/tools/r-studio-desktop.sh b/resources/tools/r-studio-desktop.sh
index a38623fc..2195c12b 100644
--- a/resources/tools/r-studio-desktop.sh
+++ b/resources/tools/r-studio-desktop.sh
@@ -17,7 +17,7 @@ if ! hash rstudio 2>/dev/null; then
cd $RESOURCES_PATH
apt-get update
#apt-get install --yes r-base
- wget https://download1.rstudio.org/desktop/xenial/amd64/rstudio-1.2.5033-amd64.deb -O ./rstudio.deb
+ wget https://download1.rstudio.org/desktop/bionic/amd64/rstudio-1.3.1093-amd64.deb -O ./rstudio.deb
# ld library path makes problems
LD_LIBRARY_PATH="" gdebi --non-interactive ./rstudio.deb
rm ./rstudio.deb
@@ -31,11 +31,11 @@ nohup sleep 4 && chown root:root /tmp && chmod a+rwx /tmp &
# Run
if [ $INSTALL_ONLY = 0 ] ; then
echo "Run Rstudio Desktop"
- LD_LIBRARY_PATH="" rstudio --no-sandbox
+ LD_LIBRARY_PATH="" rstudio --no-sandbox
sleep 10
fi
-# Fix tmp permission
+# Fix tmp permission
sleep 5
chown root:root /tmp
chmod a+rwx /tmp
diff --git a/resources/tools/r-studio-server.sh b/resources/tools/r-studio-server.sh
index 7ed1648d..fff51bd2 100644
--- a/resources/tools/r-studio-server.sh
+++ b/resources/tools/r-studio-server.sh
@@ -20,7 +20,7 @@ if [ ! -f "/usr/lib/rstudio-server/bin/rserver" ]; then
# r-base and r-cairo (for displaying plots)
conda install -y -c r r-base r-cairo
apt-get update
- wget https://download2.rstudio.org/server/trusty/amd64/rstudio-server-1.2.5033-amd64.deb -O ./rstudio.deb
+ wget https://download2.rstudio.org/server/xenial/amd64/rstudio-server-1.3.1093-amd64.deb -O ./rstudio.deb
apt-get install -y ./rstudio.deb
rm ./rstudio.deb
# Rstudio Server cannot run via root -> create rstudio user
diff --git a/resources/tools/robo3t.sh b/resources/tools/robo3t.sh
index 43c02df4..d9e1f348 100644
--- a/resources/tools/robo3t.sh
+++ b/resources/tools/robo3t.sh
@@ -15,10 +15,10 @@ done
if ! hash robo3t 2>/dev/null; then
echo "Installing Robo3T. Please wait..."
cd $RESOURCES_PATH
- wget https://github.com/Studio3T/robomongo/releases/download/v1.3.1/robo3t-1.3.1-linux-x86_64-7419c406.tar.gz -O ./robomongo.tar.gz
+ wget https://github.com/Studio3T/robomongo/releases/download/v1.4.2/robo3t-1.4.2-linux-x86_64-8650949.tar.gz -O ./robomongo.tar.gz
tar xfz ./robomongo.tar.gz
- chmod a+rwx ./robo3t-1.3.1-linux-x86_64-7419c406/bin/robo3t
- ln -s $RESOURCES_PATH/robo3t-1.3.1-linux-x86_64-7419c406/bin/robo3t /usr/local/bin/robo3t
+    chmod a+rwx ./robo3t-1.4.2-linux-x86_64-8650949/bin/robo3t
+    ln -s $RESOURCES_PATH/robo3t-1.4.2-linux-x86_64-8650949/bin/robo3t /usr/local/bin/robo3t
rm ./robomongo.tar.gz
else
echo "Robo3T is already installed"
diff --git a/resources/tools/sqlectron.sh b/resources/tools/sqlectron.sh
index 46d9e12e..6450e1a7 100644
--- a/resources/tools/sqlectron.sh
+++ b/resources/tools/sqlectron.sh
@@ -17,7 +17,7 @@ if ! hash sqlectron 2>/dev/null; then
echo "Installing Sqlectron Term. Please wait..."
npm install -g sqlectron-term
echo "Installing Sqlectron GUI"
- wget https://github.com/sqlectron/sqlectron-gui/releases/download/v1.30.0/Sqlectron_1.30.0_amd64.deb -O ./sqlectron.deb
+ wget https://github.com/sqlectron/sqlectron-gui/releases/download/v1.32.1/Sqlectron_1.32.1_amd64.deb -O ./sqlectron.deb
apt-get update
apt-get install -y ./sqlectron.deb
rm ./sqlectron.deb
@@ -29,4 +29,4 @@ if [ $INSTALL_ONLY = 0 ] ; then
echo "Sqlectron is a GUI application. Make sure to run this script only within the VNC Desktop."
sqlectron
sleep 10
-fi
\ No newline at end of file
+fi
From d62952b76f95297ea6acfc2ed9de0b45c1a800f3 Mon Sep 17 00:00:00 2001
From: Benjamin Raethlein
Date: Fri, 20 Nov 2020 14:24:28 +0100
Subject: [PATCH 041/293] Rollback to former method of installing extensions
---
Dockerfile | 68 +++++++++++++++++++++++++++---------------------------
1 file changed, 34 insertions(+), 34 deletions(-)
diff --git a/Dockerfile b/Dockerfile
index 7e268edb..bf851bb9 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -800,7 +800,7 @@ RUN \
# https://github.com/cdr/code-server/issues/171
# Alternative install: /usr/local/bin/code-server --user-data-dir=$HOME/.config/Code/ --extensions-dir=$HOME/.vscode/extensions/ --install-extension ms-python-release && \
RUN \
- SLEEP_TIMER=10 && \
+ SLEEP_TIMER=20 && \
# If minimal flavor -> exit here
if [ "$WORKSPACE_FLAVOR" = "minimal" ]; then \
exit 0 ; \
@@ -808,58 +808,58 @@ RUN \
cd $RESOURCES_PATH && \
mkdir -p $HOME/.vscode/extensions/ && \
# Install python extension - (newer versions are 30MB bigger)
- VS_PYTHON_VERSION="2020.7.96456" \
- # wget --no-verbose https://github.com/microsoft/vscode-python/releases/download/$VS_PYTHON_VERSION/ms-python-release.vsix && \
- # bsdtar -xf ms-python-release.vsix extension && \
- # rm ms-python-release.vsix && \
- # mv extension $HOME/.vscode/extensions/ms-python.python-$VS_PYTHON_VERSION && \
- && code-server --install-extension ms-python.python@$VS_PYTHON_VERSION \
+ VS_PYTHON_VERSION="2020.11.371526539" \
+ && wget --no-verbose https://github.com/microsoft/vscode-python/releases/download/$VS_PYTHON_VERSION/ms-python-release.vsix && \
+ bsdtar -xf ms-python-release.vsix extension && \
+ rm ms-python-release.vsix && \
+ mv extension $HOME/.vscode/extensions/ms-python.python-$VS_PYTHON_VERSION \
+ # && code-server --install-extension ms-python.python@$VS_PYTHON_VERSION \
&& sleep $SLEEP_TIMER && \
# Install vscode-java: https://github.com/redhat-developer/vscode-java/releases
# higher versions do not support vs code 1.39
VS_JAVA_VERSION="0.61.0" \
- # wget --quiet --no-check-certificate https://github.com/redhat-developer/vscode-java/releases/download/v$VS_JAVA_VERSION/redhat.java-$VS_JAVA_VERSION.vsix && \
- # wget --no-verbose -O redhat.java-$VS_JAVA_VERSION.vsix https://marketplace.visualstudio.com/_apis/public/gallery/publishers/redhat/vsextensions/java/$VS_JAVA_VERSION/vspackage && \
- # bsdtar -xf redhat.java-$VS_JAVA_VERSION.vsix extension && \
- # rm redhat.java-$VS_JAVA_VERSION.vsix && \
- # mv extension $HOME/.vscode/extensions/redhat.java-$VS_JAVA_VERSION && \
- && code-server --install-extension redhat.java@$VS_JAVA_VERSION \
+ && wget --quiet --no-check-certificate https://github.com/redhat-developer/vscode-java/releases/download/v$VS_JAVA_VERSION/redhat.java-$VS_JAVA_VERSION.vsix && \
+ wget --no-verbose -O redhat.java-$VS_JAVA_VERSION.vsix https://marketplace.visualstudio.com/_apis/public/gallery/publishers/redhat/vsextensions/java/$VS_JAVA_VERSION/vspackage && \
+ bsdtar -xf redhat.java-$VS_JAVA_VERSION.vsix extension && \
+ rm redhat.java-$VS_JAVA_VERSION.vsix && \
+ mv extension $HOME/.vscode/extensions/redhat.java-$VS_JAVA_VERSION \
+ # && code-server --install-extension redhat.java@$VS_JAVA_VERSION \
# If light flavor -> exit here
&& if [ "$WORKSPACE_FLAVOR" = "light" ]; then \
exit 0 ; \
fi && \
sleep $SLEEP_TIMER && \
# Install git lens: https://github.com/eamodio/vscode-gitlens
- VS_GITLENS_VERSION="10.2.2" \
- # wget --no-verbose https://github.com/eamodio/vscode-gitlens/releases/download/v$VS_GITLENS_VERSION/gitlens-$VS_GITLENS_VERSION.vsix && \
- # bsdtar -xf gitlens-$VS_GITLENS_VERSION.vsix extension && \
- # rm gitlens-$VS_GITLENS_VERSION.vsix && \
- # mv extension $HOME/.vscode/extensions/eamodio.gitlens-$VS_GITLENS_VERSION && \
- && code-server --install-extension eamodio.gitlens@$VS_GITLENS_VERSION \
+ VS_GITLENS_VERSION="10.2.3" \
+ && wget --no-verbose https://github.com/eamodio/vscode-gitlens/releases/download/v$VS_GITLENS_VERSION/gitlens-$VS_GITLENS_VERSION.vsix && \
+ bsdtar -xf gitlens-$VS_GITLENS_VERSION.vsix extension && \
+ rm gitlens-$VS_GITLENS_VERSION.vsix && \
+ mv extension $HOME/.vscode/extensions/eamodio.gitlens-$VS_GITLENS_VERSION \
+ # && code-server --install-extension eamodio.gitlens@$VS_GITLENS_VERSION \
# Install code runner: https://github.com/formulahendry/vscode-code-runner/releases/latest
&& VS_CODE_RUNNER_VERSION="0.9.17" \
- # wget --no-verbose https://github.com/formulahendry/vscode-code-runner/releases/download/$VS_CODE_RUNNER_VERSION/code-runner-$VS_CODE_RUNNER_VERSION.vsix && \
- # bsdtar -xf code-runner-$VS_CODE_RUNNER_VERSION.vsix extension && \
- # rm code-runner-$VS_CODE_RUNNER_VERSION.vsix && \
- # mv extension $HOME/.vscode/extensions/code-runner-$VS_CODE_RUNNER_VERSION && \
- && code-server --install-extension formulahendry.code-runner@$VS_CODE_RUNNER_VERSION \
+ && wget --no-verbose https://github.com/formulahendry/vscode-code-runner/releases/download/$VS_CODE_RUNNER_VERSION/code-runner-$VS_CODE_RUNNER_VERSION.vsix && \
+ bsdtar -xf code-runner-$VS_CODE_RUNNER_VERSION.vsix extension && \
+ rm code-runner-$VS_CODE_RUNNER_VERSION.vsix && \
+ mv extension $HOME/.vscode/extensions/code-runner-$VS_CODE_RUNNER_VERSION \
+ # && code-server --install-extension formulahendry.code-runner@$VS_CODE_RUNNER_VERSION \
&& sleep $SLEEP_TIMER && \
# Install ESLint extension: https://marketplace.visualstudio.com/items?itemName=dbaeumer.vscode-eslint
# Older versions do not support vscode 1.39 - https://github.com/microsoft/vscode-eslint/
VS_ESLINT_VERSION="2.1.10" \
- # wget --no-verbose https://marketplace.visualstudio.com/_apis/public/gallery/publishers/dbaeumer/vsextensions/vscode-eslint/$VS_ESLINT_VERSION/vspackage -O dbaeumer.vscode-eslint.vsix && \
- # bsdtar -xf dbaeumer.vscode-eslint.vsix extension && \
- # rm dbaeumer.vscode-eslint.vsix && \
- # mv extension $HOME/.vscode/extensions/dbaeumer.vscode-eslint-$VS_ESLINT_VERSION.vsix && \
- && code-server --install-extension dbaeumer.vscode-eslint@$VS_ESLINT_VERSION \
+ && wget --no-verbose https://marketplace.visualstudio.com/_apis/public/gallery/publishers/dbaeumer/vsextensions/vscode-eslint/$VS_ESLINT_VERSION/vspackage -O dbaeumer.vscode-eslint.vsix && \
+ bsdtar -xf dbaeumer.vscode-eslint.vsix extension && \
+ rm dbaeumer.vscode-eslint.vsix && \
+ mv extension $HOME/.vscode/extensions/dbaeumer.vscode-eslint-$VS_ESLINT_VERSION.vsix \
+ # && code-server --install-extension dbaeumer.vscode-eslint@$VS_ESLINT_VERSION \
&& sleep $SLEEP_TIMER && \
# Install Markdown lint extension: https://marketplace.visualstudio.com/items?itemName=DavidAnson.vscode-markdownlint
VS_MARKDOWN_LINT_VERSION="0.37.2" \
- # wget --no-verbose https://marketplace.visualstudio.com/_apis/public/gallery/publishers/DavidAnson/vsextensions/vscode-markdownlint/$VS_MARKDOWN_LINT_VERSION/vspackage -O davidanson.vscode-markdownlint.vsix && \
- # bsdtar -xf davidanson.vscode-markdownlint.vsix extension && \
- # rm davidanson.vscode-markdownlint.vsix && \
- # mv extension $HOME/.vscode/extensions/davidanson.vscode-markdownlint-$VS_MARKDOWN_LINT_VERSION.vsix && \
- && code-server --install-extension davidanson.vscode-markdownlint@$VS_MARKDOWN_LINT_VERSION \
+ && wget --no-verbose https://marketplace.visualstudio.com/_apis/public/gallery/publishers/DavidAnson/vsextensions/vscode-markdownlint/$VS_MARKDOWN_LINT_VERSION/vspackage -O davidanson.vscode-markdownlint.vsix && \
+ bsdtar -xf davidanson.vscode-markdownlint.vsix extension && \
+ rm davidanson.vscode-markdownlint.vsix && \
+ mv extension $HOME/.vscode/extensions/davidanson.vscode-markdownlint-$VS_MARKDOWN_LINT_VERSION.vsix \
+ # && code-server --install-extension davidanson.vscode-markdownlint@$VS_MARKDOWN_LINT_VERSION \
# Fix permissions
&& fix-permissions.sh $HOME/.vscode/extensions/ && \
# Cleanup
From a33b91fd9cc9bd04132eb7c53cd001bb77dc9f64 Mon Sep 17 00:00:00 2001
From: Benjamin Raethlein
Date: Fri, 20 Nov 2020 14:25:26 +0100
Subject: [PATCH 042/293] Update code-server version
---
resources/tools/vs-code-server.sh | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/resources/tools/vs-code-server.sh b/resources/tools/vs-code-server.sh
index 7ffc9428..df394b73 100644
--- a/resources/tools/vs-code-server.sh
+++ b/resources/tools/vs-code-server.sh
@@ -21,8 +21,8 @@ if [ ! -f "/usr/local/bin/code-server" ]; then
# VS_CODE_VERSION=$CODE_SERVER_VERSION-vsc1.41.1
# wget -q https://github.com/cdr/code-server/releases/download/$CODE_SERVER_VERSION/code-server$VS_CODE_VERSION-linux-x86_64.tar.gz -O ./vscode-web.tar.gz
# Use older version, since newer has some problems with python extension
- VS_CODE_VERSION=3.4.1
- wget -q https://github.com/cdr/code-server/releases/download/$VS_CODE_VERSION/code-server_${VS_CODE_VERSION}_amd64.deb -O ./code-server.deb
+ VS_CODE_VERSION=3.7.2
+ wget -q https://github.com/cdr/code-server/releases/download/v$VS_CODE_VERSION/code-server_${VS_CODE_VERSION}_amd64.deb -O ./code-server.deb
apt-get update
apt-get install -y ./code-server.deb
rm ./code-server.deb
From 4bd9fc8af6a2fa187355cbb9b5e1ec78eb271fa4 Mon Sep 17 00:00:00 2001
From: Benjamin Raethlein
Date: Fri, 20 Nov 2020 14:40:50 +0100
Subject: [PATCH 043/293] Update build.pys due to new universal_build lib
---
build.py | 7 ++++---
gpu-flavor/build.py | 7 ++++---
r-flavor/build.py | 7 ++++---
spark-flavor/build.py | 7 ++++---
4 files changed, 16 insertions(+), 12 deletions(-)
diff --git a/build.py b/build.py
index cc0d1aad..d3d26ded 100644
--- a/build.py
+++ b/build.py
@@ -3,6 +3,7 @@
import subprocess
from universal_build import build_utils
+from universal_build.helpers import build_docker
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument(
@@ -93,7 +94,7 @@
+ build_date_build_arg
)
- completed_process = build_utils.build_docker_image(
+ completed_process = build_docker.build_docker_image(
service_name, version=args[build_utils.FLAG_VERSION], build_args=build_args
)
if completed_process.returncode > 0:
@@ -106,8 +107,8 @@
completed_process = build_utils.run("python ./tests/run.py", exit_on_error=True)
if args[build_utils.FLAG_RELEASE]:
- build_utils.release_docker_image(
+ build_docker.release_docker_image(
service_name,
args[build_utils.FLAG_VERSION],
- args[build_utils.FLAG_DOCKER_IMAGE_PREFIX],
+ args[build_docker.FLAG_DOCKER_IMAGE_PREFIX],
)
diff --git a/gpu-flavor/build.py b/gpu-flavor/build.py
index f1240a07..c8aaf1cc 100644
--- a/gpu-flavor/build.py
+++ b/gpu-flavor/build.py
@@ -3,6 +3,7 @@
import datetime
from universal_build import build_utils
+from universal_build.helpers import build_docker
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument(
@@ -74,15 +75,15 @@
+ build_date_build_arg
)
- completed_process = build_utils.build_docker_image(
+ completed_process = build_docker.build_docker_image(
COMPONENT_NAME, version=args[build_utils.FLAG_VERSION], build_args=build_args
)
if completed_process.returncode > 0:
build_utils.exit_process(1)
if args[build_utils.FLAG_RELEASE]:
- build_utils.release_docker_image(
+ build_docker.release_docker_image(
COMPONENT_NAME,
args[build_utils.FLAG_VERSION],
- args[build_utils.FLAG_DOCKER_IMAGE_PREFIX],
+ args[build_docker.FLAG_DOCKER_IMAGE_PREFIX],
)
diff --git a/r-flavor/build.py b/r-flavor/build.py
index ab247635..95566453 100644
--- a/r-flavor/build.py
+++ b/r-flavor/build.py
@@ -3,6 +3,7 @@
import datetime
from universal_build import build_utils
+from universal_build.helpers import build_docker
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument(
@@ -74,15 +75,15 @@
+ build_date_build_arg
)
- completed_process = build_utils.build_docker_image(
+ completed_process = build_docker.build_docker_image(
COMPONENT_NAME, version=args[build_utils.FLAG_VERSION], build_args=build_args
)
if completed_process.returncode > 0:
build_utils.exit_process(1)
if args[build_utils.FLAG_RELEASE]:
- build_utils.release_docker_image(
+ build_docker.release_docker_image(
COMPONENT_NAME,
args[build_utils.FLAG_VERSION],
- args[build_utils.FLAG_DOCKER_IMAGE_PREFIX],
+ args[build_docker.FLAG_DOCKER_IMAGE_PREFIX],
)
diff --git a/spark-flavor/build.py b/spark-flavor/build.py
index 0bce0844..a4288495 100644
--- a/spark-flavor/build.py
+++ b/spark-flavor/build.py
@@ -3,6 +3,7 @@
import datetime
from universal_build import build_utils
+from universal_build.helpers import build_docker
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument(
@@ -74,15 +75,15 @@
+ build_date_build_arg
)
- completed_process = build_utils.build_docker_image(
+ completed_process = build_docker.build_docker_image(
COMPONENT_NAME, version=args[build_utils.FLAG_VERSION], build_args=build_args
)
if completed_process.returncode > 0:
build_utils.exit_process(1)
if args[build_utils.FLAG_RELEASE]:
- build_utils.release_docker_image(
+ build_docker.release_docker_image(
COMPONENT_NAME,
args[build_utils.FLAG_VERSION],
- args[build_utils.FLAG_DOCKER_IMAGE_PREFIX],
+ args[build_docker.FLAG_DOCKER_IMAGE_PREFIX],
)
From 83096ec007f06491960a51ae31bfdd8bde5d33af Mon Sep 17 00:00:00 2001
From: Jan Kalkan
Date: Fri, 20 Nov 2020 15:05:52 +0100
Subject: [PATCH 044/293] Fix broken links
---
Dockerfile | 3 +-
resources/libraries/requirements-light.txt | 34 +++++++--------
resources/libraries/requirements-minimal.txt | 44 ++++++++++----------
resources/tools/filebrowser.sh | 3 +-
resources/tools/vs-code-server.sh | 4 +-
5 files changed, 43 insertions(+), 45 deletions(-)
diff --git a/Dockerfile b/Dockerfile
index f69224c5..1c97038e 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -250,8 +250,7 @@ ENV \
CONDA_PYTHON_DIR=/opt/conda/lib/python3.8 \
MINICONDA_VERSION=4.8.3 \
MINICONDA_MD5=d63adf39f2c220950a063e0529d4ff74 \
- CONDA_VERSION=4.9.1
-
+ CONDA_VERSION=4.8.3
RUN wget --no-verbose https://repo.anaconda.com/miniconda/Miniconda3-py38_${CONDA_VERSION}-Linux-x86_64.sh -O ~/miniconda.sh && \
echo "${MINICONDA_MD5} *miniconda.sh" | md5sum -c - && \
/bin/bash ~/miniconda.sh -b -p $CONDA_DIR && \
diff --git a/resources/libraries/requirements-light.txt b/resources/libraries/requirements-light.txt
index c83ddd63..c8c72007 100644
--- a/resources/libraries/requirements-light.txt
+++ b/resources/libraries/requirements-light.txt
@@ -2,28 +2,28 @@
## ML Frameworks
# tensorflow & pytorch are installed via conda
-keras==2.3.1 # Deep Learning for humans
-xgboost==0.90 # Scalable, Portable and Distributed Gradient Boosting
-scikit-learn==0.22.1 # A set of python modules for machine learning and data mining
-statsmodels==0.11.0 # Statistical modeling and econometrics in Python
+keras==2.4.3 # Deep Learning for humans
+xgboost==1.2.1 # Scalable, Portable and Distributed Gradient Boosting
+scikit-learn==0.23.2 # A set of python modules for machine learning and data mining
+statsmodels==0.12.1 # Statistical modeling and econometrics in Python
mxnet-mkl==1.5.1.post0 # MXNet is an ultra-scalable deep learning framework. This version uses MKLDNN.
## NLP
-spacy==2.2.3 # Industrial-strength Natural Language Processing (NLP) with Python and Cython
-nltk==3.4.5 # Natural Language Toolkit
-fasttext==0.9.1 # fasttext Python bindings, a library for efficient learning of word representations and sentence classification.
-beautifulsoup4==4.8.2 # Beautiful Soup is a library that makes it easy to scrape information from web pages.
-soupsieve==1.9.5 # A modern CSS selector implementation for Beautiful Soup.
+spacy==2.3.2 # Industrial-strength Natural Language Processing (NLP) with Python and Cython
+nltk==3.5 # Natural Language Toolkit
+fasttext==0.9.2 # fasttext Python bindings, a library for efficient learning of word representations and sentence classification.
+beautifulsoup4==4.9.3 # Beautiful Soup is a library that makes it easy to scrape information from web pages.
+soupsieve==2.0.1 # A modern CSS selector implementation for Beautiful Soup.
# Images
-opencv-python-headless==4.1.2.30 # Wrapper package for OpenCV python bindings.
-opencv-python==4.1.2.30 # Wrapper package for OpenCV python bindings.
-scikit-image==0.16.2 # Image processing routines for SciPy - version 0.14.1 is not compatible with numpy 16
+opencv-python-headless==4.4.0.46 # Wrapper package for OpenCV python bindings.
+opencv-python==4.4.0.46 # Wrapper package for OpenCV python bindings.
+scikit-image==0.17.2 # Image processing routines for SciPy - version 0.14.1 is not compatible with numpy 16
## Others
-pymongo==3.10.1 # Mongo Client
-fire==0.2.1 # A library for automatically generating command line interfaces.
+pymongo==3.11.1 # Mongo Client
+fire==0.3.1 # A library for automatically generating command line interfaces.
patsy==0.5.1 # A Python package for describing statistical models and for building design matrices.
-pandas-profiling==2.4.0 # Generate profile report for pandas DataFrame
-tensorboardX==2.0 # TensorBoardX lets you watch Tensors Flow without Tensorflow
-boto3==1.11.9 # The AWS SDK for Python
\ No newline at end of file
+pandas-profiling==2.9.0 # Generate profile report for pandas DataFrame
+tensorboardX==2.1 # TensorBoardX lets you watch Tensors Flow without Tensorflow
+boto3==1.16.22 # The AWS SDK for Python
diff --git a/resources/libraries/requirements-minimal.txt b/resources/libraries/requirements-minimal.txt
index 4bee1b7e..1c9535e2 100644
--- a/resources/libraries/requirements-minimal.txt
+++ b/resources/libraries/requirements-minimal.txt
@@ -1,46 +1,46 @@
# Minimum Requirements
## Utilities
-httpie==2.1.0 # HTTPie - a CLI, cURL-like tool for humans.
-cloudpickle==1.5.0 # Extended pickling support for Python objects
+httpie==2.3.0 # HTTPie - a CLI, cURL-like tool for humans.
+cloudpickle==1.6.0 # Extended pickling support for Python objects
msgpack==1.0.0 # MessagePack (de)serializer.
-msgpack-numpy==0.4.6 # Numpy data serialization using msgpack
+msgpack-numpy==0.4.7.1 # Numpy data serialization using msgpack
cysignals==1.10.2 # Interrupt and signal handling for Cython
-h5py==2.10.0 # Read and write HDF5 files from Python
-seaborn==0.10.1 # Python visualization library based on matplotlib - Basic
-SQLAlchemy==1.3.18 # Database Abstraction Library
-virtualenv==20.0.20 # Virtual Python Environment builder
-pytest==5.4.3 # pytest: simple powerful testing with Python
-autopep8==1.5.3 # A tool that automatically formats Python code to conform to the PEP 8 style guide
-flake8==3.8.3 # The modular source code checker: pep8, pyflakes and co
+h5py==3.1.0 # Read and write HDF5 files from Python
+seaborn==0.11.0 # Python visualization library based on matplotlib - Basic
+SQLAlchemy==1.3.20 # Database Abstraction Library
+virtualenv==20.1.0 # Virtual Python Environment builder
+pytest==6.1.2 # pytest: simple powerful testing with Python
+autopep8==1.5.4 # A tool that automatically formats Python code to conform to the PEP 8 style guide
+flake8==3.8.4 # The modular source code checker: pep8, pyflakes and co
black==19.10b0 # The uncompromising code formatter.
-pylint==2.5.3 # python code static checker
+pylint==2.6.0 # python code static checker
pycodestyle==2.6.0 # Python style guide checker
-pydocstyle==5.0.2 # Python docstring style checker
-sortedcontainers==2.2.2 # Sorted Containers -- Sorted List, Sorted Dict, Sorted Set
+pydocstyle==5.1.1 # Python docstring style checker
+sortedcontainers==2.3.0 # Sorted Containers -- Sorted List, Sorted Dict, Sorted Set
simplejson==3.17.2 # Simple, fast, extensible JSON encoder/decoder for Python
mock==4.0.2 # Rolling backport of unittest.mock for all Pythons
python-dateutil==2.8.1 # 2.8.0 required by botocore: Extensions to the standard Python datetime module
# ML libraries
-tensorboard==2.0.0 # TensorBoard lets you watch Tensors Flow
+tensorboard==2.4.0 # TensorBoard lets you watch Tensors Flow
## Glances
-psutil==5.6.7 # Cross-platform lib for process and system monitoring in Python.
-bottle==0.12.18 # Fast and simple WSGI-framework for small web-applications.
+psutil==5.7.3 # Cross-platform lib for process and system monitoring in Python.
+bottle==0.12.19 # Fast and simple WSGI-framework for small web-applications.
netifaces==0.10.9 # Portable network interface information.
-py-cpuinfo==5.0.0 # Get CPU info with pure Python 2 & 3
-glances==3.1.4.1 # A cross-platform curses-based monitoring tool
+py-cpuinfo==7.0.0 # Get CPU info with pure Python 2 & 3
+glances==3.1.5 # A cross-platform curses-based monitoring tool
pymdstat==0.4.2 # Python library to parse Linux /proc/mdstat
## Jupyter
-jupytext==1.5.2 # Jupyter notebooks as Markdown documents, Julia, Python or R scripts
+jupytext==1.7.1 # Jupyter notebooks as Markdown documents, Julia, Python or R scripts
nbresuse==0.3.6 # Simple Jupyter extension to show how much resources (RAM) your notebook is using
-ipympl==0.5.7 # Matplotlib Jupyter Extension
-jupyterhub==1.1.0 # JupyterHub: A multi-user server for Jupyter notebooks
+ipympl==0.5.8 # Matplotlib Jupyter Extension
+jupyterhub==1.2.1 # JupyterHub: A multi-user server for Jupyter notebooks
remote_ikernel==0.4.6 # Running IPython kernels through batch queues
jupyter_contrib_nbextensions==0.5.1 # A collection of Jupyter nbextensions.
jupyter_nbextensions_configurator==0.4.1 # jupyter serverextension providing configuration interfaces for nbextensions.
# jupyter-tensorboard==0.1.10 # Jupyter notebook integration for tensorboard.
git+https://github.com/cloudrainstar/jupyter_tensorboard.git # Use other version with support for tensorflow 2.X
-nbdime==2.0.0 # Diff and merge of Jupyter Notebooks
\ No newline at end of file
+nbdime==2.1.0 # Diff and merge of Jupyter Notebooks
diff --git a/resources/tools/filebrowser.sh b/resources/tools/filebrowser.sh
index c32f7640..a61fdfe8 100644
--- a/resources/tools/filebrowser.sh
+++ b/resources/tools/filebrowser.sh
@@ -18,8 +18,7 @@ if [ ! -f "/usr/local/bin/filebrowser" ]; then
echo "Installing Filebrowser. Please wait..."
mkdir -p $RESOURCES_PATH/filebrowser
cd $RESOURCES_PATH/filebrowser
- wget -q https://github.com/filebrowser/filebrowser/releases/download/v2.9.0/linux-amd64-filebrowser.tar.gz
- -O ./filebrowser.tar.gz
+ wget -q https://github.com/filebrowser/filebrowser/releases/download/v2.9.0/linux-amd64-filebrowser.tar.gz -O ./filebrowser.tar.gz
tar -xzf ./filebrowser.tar.gz
chmod +x "./filebrowser"
mv "./filebrowser" "/usr/local/bin/filebrowser"
diff --git a/resources/tools/vs-code-server.sh b/resources/tools/vs-code-server.sh
index e0ed6b49..5d3df752 100644
--- a/resources/tools/vs-code-server.sh
+++ b/resources/tools/vs-code-server.sh
@@ -23,8 +23,8 @@ if [ ! -f "/usr/local/bin/code-server" ]; then
# Use older version, since newer has some problems with python extension
# Todo: Check if update from 3.4.1 causes problems
# Todo: Remove comment if no problem
- VS_CODE_VERSION=3.7.1
- wget -q https://github.com/cdr/code-server/releases/download/$VS_CODE_VERSION/code-server_${VS_CODE_VERSION}_amd64.deb -O ./code-server.deb
+ VS_CODE_VERSION=3.7.2
+ wget -q https://github.com/cdr/code-server/releases/download/v$VS_CODE_VERSION/code-server_${VS_CODE_VERSION}_amd64.deb -O ./code-server.deb
apt-get update
apt-get install -y ./code-server.deb
rm ./code-server.deb
From 414fc4c9bebc318eacd5e9d332f4667961b081d9 Mon Sep 17 00:00:00 2001
From: Benjamin Raethlein
Date: Fri, 20 Nov 2020 15:06:41 +0100
Subject: [PATCH 045/293] Install pip with conda to fix 'missing pip' issue
---
Dockerfile | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/Dockerfile b/Dockerfile
index bf851bb9..35bcab88 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -270,7 +270,8 @@ RUN wget --no-verbose https://repo.anaconda.com/miniconda/Miniconda3-py37_${COND
# Link Conda
ln -s $CONDA_DIR/bin/python /usr/local/bin/python && \
ln -s $CONDA_DIR/bin/conda /usr/bin/conda && \
- # Update pip
+ # Update
+ $CONDA_DIR/bin/pip install -y pip && \
$CONDA_DIR/bin/pip install --upgrade pip && \
chmod -R a+rwx /usr/local/bin/ && \
# Cleanup - Remove all here since conda is not in path as of now
From 776a9d97bd0be852193ea5c174f7ad635ac9e327 Mon Sep 17 00:00:00 2001
From: Benjamin Raethlein
Date: Fri, 20 Nov 2020 15:16:14 +0100
Subject: [PATCH 046/293] Fix pip install command
---
Dockerfile | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Dockerfile b/Dockerfile
index 35bcab88..7089d55a 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -271,7 +271,7 @@ RUN wget --no-verbose https://repo.anaconda.com/miniconda/Miniconda3-py37_${COND
ln -s $CONDA_DIR/bin/python /usr/local/bin/python && \
ln -s $CONDA_DIR/bin/conda /usr/bin/conda && \
# Update
- $CONDA_DIR/bin/pip install -y pip && \
+ $CONDA_DIR/bin/conda install -y pip && \
$CONDA_DIR/bin/pip install --upgrade pip && \
chmod -R a+rwx /usr/local/bin/ && \
# Cleanup - Remove all here since conda is not in path as of now
From 7fe6722f056d07a4799f3f6bb299fc0890dbcfbe Mon Sep 17 00:00:00 2001
From: Benjamin Raethlein
Date: Fri, 20 Nov 2020 17:13:40 +0100
Subject: [PATCH 047/293] Let wget retry fetching the VS Code extension
---
Dockerfile | 9 +++++----
1 file changed, 5 insertions(+), 4 deletions(-)
diff --git a/Dockerfile b/Dockerfile
index 7089d55a..fdbf5ca4 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -820,7 +820,7 @@ RUN \
# higher versions do not support vs code 1.39
VS_JAVA_VERSION="0.61.0" \
&& wget --quiet --no-check-certificate https://github.com/redhat-developer/vscode-java/releases/download/v$VS_JAVA_VERSION/redhat.java-$VS_JAVA_VERSION.vsix && \
- wget --no-verbose -O redhat.java-$VS_JAVA_VERSION.vsix https://marketplace.visualstudio.com/_apis/public/gallery/publishers/redhat/vsextensions/java/$VS_JAVA_VERSION/vspackage && \
+ # wget --no-verbose -O redhat.java-$VS_JAVA_VERSION.vsix https://marketplace.visualstudio.com/_apis/public/gallery/publishers/redhat/vsextensions/java/$VS_JAVA_VERSION/vspackage && \
bsdtar -xf redhat.java-$VS_JAVA_VERSION.vsix extension && \
rm redhat.java-$VS_JAVA_VERSION.vsix && \
mv extension $HOME/.vscode/extensions/redhat.java-$VS_JAVA_VERSION \
@@ -847,8 +847,9 @@ RUN \
&& sleep $SLEEP_TIMER && \
# Install ESLint extension: https://marketplace.visualstudio.com/items?itemName=dbaeumer.vscode-eslint
# Older versions do not support vscode 1.39 - https://github.com/microsoft/vscode-eslint/
- VS_ESLINT_VERSION="2.1.10" \
- && wget --no-verbose https://marketplace.visualstudio.com/_apis/public/gallery/publishers/dbaeumer/vsextensions/vscode-eslint/$VS_ESLINT_VERSION/vspackage -O dbaeumer.vscode-eslint.vsix && \
+ VS_ESLINT_VERSION="2.1.9" \
+ # && wget --no-verbose https://marketplace.visualstudio.com/_apis/public/gallery/publishers/dbaeumer/vsextensions/vscode-eslint/$VS_ESLINT_VERSION/vspackage -O dbaeumer.vscode-eslint.vsix && \
+ && wget --no-verbose https://github.com/microsoft/vscode-eslint/releases/download/$VS_ESLINT_VERSION-insider.2/vscode-eslint-$VS_ESLINT_VERSION.vsix && \
bsdtar -xf dbaeumer.vscode-eslint.vsix extension && \
rm dbaeumer.vscode-eslint.vsix && \
mv extension $HOME/.vscode/extensions/dbaeumer.vscode-eslint-$VS_ESLINT_VERSION.vsix \
@@ -856,7 +857,7 @@ RUN \
&& sleep $SLEEP_TIMER && \
# Install Markdown lint extension: https://marketplace.visualstudio.com/items?itemName=DavidAnson.vscode-markdownlint
VS_MARKDOWN_LINT_VERSION="0.37.2" \
- && wget --no-verbose https://marketplace.visualstudio.com/_apis/public/gallery/publishers/DavidAnson/vsextensions/vscode-markdownlint/$VS_MARKDOWN_LINT_VERSION/vspackage -O davidanson.vscode-markdownlint.vsix && \
+ && wget --retry-on-http-error=429 --waitretry 10 --tries 5 --no-verbose https://marketplace.visualstudio.com/_apis/public/gallery/publishers/DavidAnson/vsextensions/vscode-markdownlint/$VS_MARKDOWN_LINT_VERSION/vspackage -O davidanson.vscode-markdownlint.vsix && \
bsdtar -xf davidanson.vscode-markdownlint.vsix extension && \
rm davidanson.vscode-markdownlint.vsix && \
mv extension $HOME/.vscode/extensions/davidanson.vscode-markdownlint-$VS_MARKDOWN_LINT_VERSION.vsix \
From 901bd8db8a5d9e887829ed4d65039ec3386fea3b Mon Sep 17 00:00:00 2001
From: Benjamin Raethlein
Date: Fri, 20 Nov 2020 18:36:33 +0100
Subject: [PATCH 048/293] Try more stable wget call to get vscode extensions
---
Dockerfile | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/Dockerfile b/Dockerfile
index fdbf5ca4..84470645 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -847,9 +847,9 @@ RUN \
&& sleep $SLEEP_TIMER && \
# Install ESLint extension: https://marketplace.visualstudio.com/items?itemName=dbaeumer.vscode-eslint
# Older versions do not support vscode 1.39 - https://github.com/microsoft/vscode-eslint/
- VS_ESLINT_VERSION="2.1.9" \
- # && wget --no-verbose https://marketplace.visualstudio.com/_apis/public/gallery/publishers/dbaeumer/vsextensions/vscode-eslint/$VS_ESLINT_VERSION/vspackage -O dbaeumer.vscode-eslint.vsix && \
- && wget --no-verbose https://github.com/microsoft/vscode-eslint/releases/download/$VS_ESLINT_VERSION-insider.2/vscode-eslint-$VS_ESLINT_VERSION.vsix && \
+ VS_ESLINT_VERSION="2.1.13" \
+ && wget --retry-on-http-error=429 --waitretry 15 --tries 5 --no-verbose https://marketplace.visualstudio.com/_apis/public/gallery/publishers/dbaeumer/vsextensions/vscode-eslint/$VS_ESLINT_VERSION/vspackage -O dbaeumer.vscode-eslint.vsix && \
+ # && wget --no-verbose https://github.com/microsoft/vscode-eslint/releases/download/$VS_ESLINT_VERSION-insider.2/vscode-eslint-$VS_ESLINT_VERSION.vsix -O dbaeumer.vscode-eslint.vsix && \
bsdtar -xf dbaeumer.vscode-eslint.vsix extension && \
rm dbaeumer.vscode-eslint.vsix && \
mv extension $HOME/.vscode/extensions/dbaeumer.vscode-eslint-$VS_ESLINT_VERSION.vsix \
@@ -857,7 +857,7 @@ RUN \
&& sleep $SLEEP_TIMER && \
# Install Markdown lint extension: https://marketplace.visualstudio.com/items?itemName=DavidAnson.vscode-markdownlint
VS_MARKDOWN_LINT_VERSION="0.37.2" \
- && wget --retry-on-http-error=429 --waitretry 10 --tries 5 --no-verbose https://marketplace.visualstudio.com/_apis/public/gallery/publishers/DavidAnson/vsextensions/vscode-markdownlint/$VS_MARKDOWN_LINT_VERSION/vspackage -O davidanson.vscode-markdownlint.vsix && \
+ && wget --retry-on-http-error=429 --waitretry 15 --tries 5 --no-verbose https://marketplace.visualstudio.com/_apis/public/gallery/publishers/DavidAnson/vsextensions/vscode-markdownlint/$VS_MARKDOWN_LINT_VERSION/vspackage -O davidanson.vscode-markdownlint.vsix && \
bsdtar -xf davidanson.vscode-markdownlint.vsix extension && \
rm davidanson.vscode-markdownlint.vsix && \
mv extension $HOME/.vscode/extensions/davidanson.vscode-markdownlint-$VS_MARKDOWN_LINT_VERSION.vsix \
From ceaf75713460d11d10740d72e8e07af764ef78c0 Mon Sep 17 00:00:00 2001
From: Jan Kalkan
Date: Fri, 20 Nov 2020 20:32:51 +0100
Subject: [PATCH 049/293] Update and patch noVNC
---
Dockerfile | 4 +-
resources/novnc/app/ui.js | 3709 +++++++++++++++++++------------------
resources/novnc/vnc.html | 765 +++++---
3 files changed, 2464 insertions(+), 2014 deletions(-)
diff --git a/Dockerfile b/Dockerfile
index 1c97038e..66edde54 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -387,8 +387,8 @@ RUN \
# use older version of websockify to prevent hanging connections on offline containers?, see https://github.com/ConSol/docker-headless-vnc-container/issues/50
# Todo: Check and remove previous comment since issue is closed and websockify seems already be updated to latest version?
# Todo: Check if still works after removal
- # wget -qO- https://github.com/novnc/websockify/archive/v0.9.0.tar.gz | tar xz --strip 1 -C ./novnc/utils/websockify && \
- # chmod +x -v ./novnc/utils/*.sh && \
+ wget -qO- https://github.com/novnc/websockify/archive/v0.9.0.tar.gz | tar xz --strip 1 -C ./novnc/utils/websockify && \
+ chmod +x -v ./novnc/utils/*.sh && \
# create user vnc directory
mkdir -p $HOME/.vnc && \
# Fix permissions
diff --git a/resources/novnc/app/ui.js b/resources/novnc/app/ui.js
index 813fedcf..204f1607 100644
--- a/resources/novnc/app/ui.js
+++ b/resources/novnc/app/ui.js
@@ -1,1804 +1,1989 @@
/*
* noVNC: HTML5 VNC client
- * Copyright (C) 2018 The noVNC Authors
+ * Copyright (C) 2019 The noVNC Authors
* Licensed under MPL 2.0 (see LICENSE.txt)
*
* See README.md for usage and integration instructions.
*/
-import * as Log from '../core/util/logging.js';
-import _, {
- l10n
-} from './localization.js';
+import * as Log from "../core/util/logging.js";
+import _, { l10n } from "./localization.js";
import {
- isTouchDevice,
- isSafari,
- isIOS,
- isAndroid,
- dragThreshold
-}
-from '../core/util/browser.js';
-import {
- setCapture,
- getPointerEvent
-} from '../core/util/events.js';
+ isTouchDevice,
+ isSafari,
+ hasScrollbarGutter,
+ dragThreshold,
+} from "../core/util/browser.js";
+import { setCapture, getPointerEvent } from "../core/util/events.js";
import KeyTable from "../core/input/keysym.js";
import keysyms from "../core/input/keysymdef.js";
import Keyboard from "../core/input/keyboard.js";
import RFB from "../core/rfb.js";
import * as WebUtil from "./webutil.js";
-const UI = {
-
- connected: false,
- desktopName: "",
-
- statusTimeout: null,
- hideKeyboardTimeout: null,
- idleControlbarTimeout: null,
- closeControlbarTimeout: null,
-
- controlbarGrabbed: false,
- controlbarDrag: false,
- controlbarMouseDownClientY: 0,
- controlbarMouseDownOffsetY: 0,
-
- lastKeyboardinput: null,
- defaultKeyboardinputLen: 100,
-
- inhibit_reconnect: true,
- reconnect_callback: null,
- reconnect_password: null,
-
- prime() {
- return WebUtil.initSettings().then(() => {
- if (document.readyState === "interactive" || document.readyState === "complete") {
- return UI.start();
- }
-
- return new Promise((resolve, reject) => {
- document.addEventListener('DOMContentLoaded', () => UI.start().then(resolve).catch(reject));
- });
- });
- },
-
- // Render default UI and initialize settings menu
- start() {
-
- UI.initSettings();
-
- // Translate the DOM
- l10n.translateDOM();
-
- // Adapt the interface for touch screen devices
- if (isTouchDevice) {
- document.documentElement.classList.add("noVNC_touch");
- // Remove the address bar
- setTimeout(() => window.scrollTo(0, 1), 100);
- }
-
- // Restore control bar position
- if (WebUtil.readSetting('controlbar_pos') === 'right') {
- UI.toggleControlbarSide();
- }
-
- UI.initFullscreen();
-
- // Setup event handlers
- UI.addControlbarHandlers();
- UI.addTouchSpecificHandlers();
- UI.addExtraKeysHandlers();
- UI.addMachineHandlers();
- UI.addConnectionControlHandlers();
- UI.addClipboardHandlers();
- UI.addSettingsHandlers();
- document.getElementById("noVNC_status")
- .addEventListener('click', UI.hideStatus);
-
- // Bootstrap fallback input handler
- UI.keyboardinputReset();
-
- UI.openControlbar();
-
- UI.updateVisualState('init');
-
- document.documentElement.classList.remove("noVNC_loading");
-
- let autoconnect = WebUtil.getConfigVar('autoconnect', false);
- if (autoconnect === 'true' || autoconnect == '1') {
- autoconnect = true;
- UI.connect();
- } else {
- autoconnect = false;
- // Show the connect panel on first load unless autoconnecting
- UI.openConnectPanel();
- }
-
- return Promise.resolve(UI.rfb);
- },
-
- initFullscreen() {
- // Only show the button if fullscreen is properly supported
- // * Safari doesn't support alphanumerical input while in fullscreen
- if (!isSafari() &&
- (document.documentElement.requestFullscreen ||
- document.documentElement.mozRequestFullScreen ||
- document.documentElement.webkitRequestFullscreen ||
- document.body.msRequestFullscreen)) {
- document.getElementById('noVNC_fullscreen_button')
- .classList.remove("noVNC_hidden");
- UI.addFullscreenHandlers();
- }
- },
-
- initSettings() {
- // Logging selection dropdown
- const llevels = ['error', 'warn', 'info', 'debug'];
- for (let i = 0; i < llevels.length; i += 1) {
- UI.addOption(document.getElementById('noVNC_setting_logging'), llevels[i], llevels[i]);
- }
-
- // Settings with immediate effects
- UI.initSetting('logging', 'warn');
- UI.updateLogging();
-
- // if port == 80 (or 443) then it won't be present and should be
- // set manually
- let port = window.location.port;
- if (!port) {
- if (window.location.protocol.substring(0, 5) == 'https') {
- port = 443;
- } else if (window.location.protocol.substring(0, 4) == 'http') {
- port = 80;
- }
- }
-
- /* Populate the controls if defaults are provided in the URL */
- UI.initSetting('host', window.location.hostname);
- UI.initSetting('port', port);
- UI.initSetting('encrypt', (window.location.protocol === "https:"));
- UI.initSetting('view_clip', false);
- UI.initSetting('shared', true);
- UI.initSetting('view_only', false);
- UI.initSetting('show_dot', false);
- UI.initSetting('repeaterID', '');
- UI.initSetting('reconnect_delay', 5000);
-
- // ######################## CUSTOM CODE ########################
- UI.initSetting('resize', 'remote');
- UI.initSetting('reconnect', true);
- // update settings cannot be used anymore to force settings
- WebUtil.setSetting('path', window.location.pathname.substring(0, window.location.pathname.lastIndexOf("/")).replace(/^\//, '') + '/websockify');
- // ######################## END CUSTOM CODE ########################
-
- UI.setupSettingLabels();
- },
- // Adds a link to the label elements on the corresponding input elements
- setupSettingLabels() {
- const labels = document.getElementsByTagName('LABEL');
- for (let i = 0; i < labels.length; i++) {
- const htmlFor = labels[i].htmlFor;
- if (htmlFor != '') {
- const elem = document.getElementById(htmlFor);
- if (elem) elem.label = labels[i];
- } else {
- // If 'for' isn't set, use the first input element child
- const children = labels[i].children;
- for (let j = 0; j < children.length; j++) {
- if (children[j].form !== undefined) {
- children[j].label = labels[i];
- break;
- }
- }
- }
- }
- },
-
- /* ------^-------
- * /INIT
- * ==============
- * EVENT HANDLERS
- * ------v------*/
-
- addControlbarHandlers() {
- document.getElementById("noVNC_control_bar")
- .addEventListener('mousemove', UI.activateControlbar);
- document.getElementById("noVNC_control_bar")
- .addEventListener('mouseup', UI.activateControlbar);
- document.getElementById("noVNC_control_bar")
- .addEventListener('mousedown', UI.activateControlbar);
- document.getElementById("noVNC_control_bar")
- .addEventListener('keydown', UI.activateControlbar);
-
- document.getElementById("noVNC_control_bar")
- .addEventListener('mousedown', UI.keepControlbar);
- document.getElementById("noVNC_control_bar")
- .addEventListener('keydown', UI.keepControlbar);
-
- document.getElementById("noVNC_view_drag_button")
- .addEventListener('click', UI.toggleViewDrag);
-
- document.getElementById("noVNC_control_bar_handle")
- .addEventListener('mousedown', UI.controlbarHandleMouseDown);
- document.getElementById("noVNC_control_bar_handle")
- .addEventListener('mouseup', UI.controlbarHandleMouseUp);
- document.getElementById("noVNC_control_bar_handle")
- .addEventListener('mousemove', UI.dragControlbarHandle);
- // resize events aren't available for elements
- window.addEventListener('resize', UI.updateControlbarHandle);
-
- const exps = document.getElementsByClassName("noVNC_expander");
- for (let i = 0; i < exps.length; i++) {
- exps[i].addEventListener('click', UI.toggleExpander);
- }
- },
-
- addTouchSpecificHandlers() {
- document.getElementById("noVNC_mouse_button0")
- .addEventListener('click', () => UI.setMouseButton(1));
- document.getElementById("noVNC_mouse_button1")
- .addEventListener('click', () => UI.setMouseButton(2));
- document.getElementById("noVNC_mouse_button2")
- .addEventListener('click', () => UI.setMouseButton(4));
- document.getElementById("noVNC_mouse_button4")
- .addEventListener('click', () => UI.setMouseButton(0));
- document.getElementById("noVNC_keyboard_button")
- .addEventListener('click', UI.toggleVirtualKeyboard);
-
- UI.touchKeyboard = new Keyboard(document.getElementById('noVNC_keyboardinput'));
- UI.touchKeyboard.onkeyevent = UI.keyEvent;
- UI.touchKeyboard.grab();
- document.getElementById("noVNC_keyboardinput")
- .addEventListener('input', UI.keyInput);
- document.getElementById("noVNC_keyboardinput")
- .addEventListener('focus', UI.onfocusVirtualKeyboard);
- document.getElementById("noVNC_keyboardinput")
- .addEventListener('blur', UI.onblurVirtualKeyboard);
- document.getElementById("noVNC_keyboardinput")
- .addEventListener('submit', () => false);
-
- document.documentElement
- .addEventListener('mousedown', UI.keepVirtualKeyboard, true);
-
- document.getElementById("noVNC_control_bar")
- .addEventListener('touchstart', UI.activateControlbar);
- document.getElementById("noVNC_control_bar")
- .addEventListener('touchmove', UI.activateControlbar);
- document.getElementById("noVNC_control_bar")
- .addEventListener('touchend', UI.activateControlbar);
- document.getElementById("noVNC_control_bar")
- .addEventListener('input', UI.activateControlbar);
-
- document.getElementById("noVNC_control_bar")
- .addEventListener('touchstart', UI.keepControlbar);
- document.getElementById("noVNC_control_bar")
- .addEventListener('input', UI.keepControlbar);
-
- document.getElementById("noVNC_control_bar_handle")
- .addEventListener('touchstart', UI.controlbarHandleMouseDown);
- document.getElementById("noVNC_control_bar_handle")
- .addEventListener('touchend', UI.controlbarHandleMouseUp);
- document.getElementById("noVNC_control_bar_handle")
- .addEventListener('touchmove', UI.dragControlbarHandle);
- },
-
- addExtraKeysHandlers() {
- document.getElementById("noVNC_toggle_extra_keys_button")
- .addEventListener('click', UI.toggleExtraKeys);
- document.getElementById("noVNC_toggle_ctrl_button")
- .addEventListener('click', UI.toggleCtrl);
- document.getElementById("noVNC_toggle_windows_button")
- .addEventListener('click', UI.toggleWindows);
- document.getElementById("noVNC_toggle_alt_button")
- .addEventListener('click', UI.toggleAlt);
- document.getElementById("noVNC_send_tab_button")
- .addEventListener('click', UI.sendTab);
- document.getElementById("noVNC_send_esc_button")
- .addEventListener('click', UI.sendEsc);
- document.getElementById("noVNC_send_ctrl_alt_del_button")
- .addEventListener('click', UI.sendCtrlAltDel);
- },
-
- addMachineHandlers() {
- document.getElementById("noVNC_shutdown_button")
- .addEventListener('click', () => UI.rfb.machineShutdown());
- document.getElementById("noVNC_reboot_button")
- .addEventListener('click', () => UI.rfb.machineReboot());
- document.getElementById("noVNC_reset_button")
- .addEventListener('click', () => UI.rfb.machineReset());
- document.getElementById("noVNC_power_button")
- .addEventListener('click', UI.togglePowerPanel);
- },
-
- addConnectionControlHandlers() {
- document.getElementById("noVNC_disconnect_button")
- .addEventListener('click', UI.disconnect);
- document.getElementById("noVNC_connect_button")
- .addEventListener('click', UI.connect);
- document.getElementById("noVNC_cancel_reconnect_button")
- .addEventListener('click', UI.cancelReconnect);
-
- document.getElementById("noVNC_password_button")
- .addEventListener('click', UI.setPassword);
- },
-
- addClipboardHandlers() {
- document.getElementById("noVNC_clipboard_button")
- .addEventListener('click', UI.toggleClipboardPanel);
- document.getElementById("noVNC_clipboard_text")
- .addEventListener('change', UI.clipboardSend);
- document.getElementById("noVNC_clipboard_clear_button")
- .addEventListener('click', UI.clipboardClear);
- },
-
- // Add a call to save settings when the element changes,
- // unless the optional parameter changeFunc is used instead.
- addSettingChangeHandler(name, changeFunc) {
- const settingElem = document.getElementById("noVNC_setting_" + name);
- if (changeFunc === undefined) {
- changeFunc = () => UI.saveSetting(name);
- }
- settingElem.addEventListener('change', changeFunc);
- },
-
- addSettingsHandlers() {
- document.getElementById("noVNC_settings_button")
- .addEventListener('click', UI.toggleSettingsPanel);
-
- UI.addSettingChangeHandler('encrypt');
- UI.addSettingChangeHandler('resize');
- UI.addSettingChangeHandler('resize', UI.applyResizeMode);
- UI.addSettingChangeHandler('resize', UI.updateViewClip);
- UI.addSettingChangeHandler('view_clip');
- UI.addSettingChangeHandler('view_clip', UI.updateViewClip);
- UI.addSettingChangeHandler('shared');
- UI.addSettingChangeHandler('view_only');
- UI.addSettingChangeHandler('view_only', UI.updateViewOnly);
- UI.addSettingChangeHandler('show_dot');
- UI.addSettingChangeHandler('show_dot', UI.updateShowDotCursor);
- UI.addSettingChangeHandler('host');
- UI.addSettingChangeHandler('port');
- UI.addSettingChangeHandler('path');
- UI.addSettingChangeHandler('repeaterID');
- UI.addSettingChangeHandler('logging');
- UI.addSettingChangeHandler('logging', UI.updateLogging);
- UI.addSettingChangeHandler('reconnect');
- UI.addSettingChangeHandler('reconnect_delay');
- },
-
- addFullscreenHandlers() {
- document.getElementById("noVNC_fullscreen_button")
- .addEventListener('click', UI.toggleFullscreen);
-
- window.addEventListener('fullscreenchange', UI.updateFullscreenButton);
- window.addEventListener('mozfullscreenchange', UI.updateFullscreenButton);
- window.addEventListener('webkitfullscreenchange', UI.updateFullscreenButton);
- window.addEventListener('msfullscreenchange', UI.updateFullscreenButton);
- },
-
- /* ------^-------
- * /EVENT HANDLERS
- * ==============
- * VISUAL
- * ------v------*/
-
- // Disable/enable controls depending on connection state
- updateVisualState(state) {
-
- document.documentElement.classList.remove("noVNC_connecting");
- document.documentElement.classList.remove("noVNC_connected");
- document.documentElement.classList.remove("noVNC_disconnecting");
- document.documentElement.classList.remove("noVNC_reconnecting");
-
- const transition_elem = document.getElementById("noVNC_transition_text");
- switch (state) {
- case 'init':
- break;
- case 'connecting':
- transition_elem.textContent = _("Connecting...");
- document.documentElement.classList.add("noVNC_connecting");
- break;
- case 'connected':
- document.documentElement.classList.add("noVNC_connected");
- break;
- case 'disconnecting':
- transition_elem.textContent = _("Disconnecting...");
- document.documentElement.classList.add("noVNC_disconnecting");
- break;
- case 'disconnected':
- break;
- case 'reconnecting':
- transition_elem.textContent = _("Reconnecting...");
- document.documentElement.classList.add("noVNC_reconnecting");
- break;
- default:
- Log.Error("Invalid visual state: " + state);
- UI.showStatus(_("Internal error"), 'error');
- return;
- }
-
- if (UI.connected) {
- UI.updateViewClip();
-
- UI.disableSetting('encrypt');
- UI.disableSetting('shared');
- UI.disableSetting('host');
- UI.disableSetting('port');
- UI.disableSetting('path');
- UI.disableSetting('repeaterID');
- UI.setMouseButton(1);
-
- // Hide the controlbar after 2 seconds
- UI.closeControlbarTimeout = setTimeout(UI.closeControlbar, 2000);
- } else {
- UI.enableSetting('encrypt');
- UI.enableSetting('shared');
- UI.enableSetting('host');
- UI.enableSetting('port');
- UI.enableSetting('path');
- UI.enableSetting('repeaterID');
- UI.updatePowerButton();
- UI.keepControlbar();
- }
-
- // State change closes the password dialog
- document.getElementById('noVNC_password_dlg')
- .classList.remove('noVNC_open');
- },
-
- showStatus(text, status_type, time) {
- const statusElem = document.getElementById('noVNC_status');
-
- clearTimeout(UI.statusTimeout);
-
- if (typeof status_type === 'undefined') {
- status_type = 'normal';
- }
-
- // Don't overwrite more severe visible statuses and never
- // errors. Only shows the first error.
- let visible_status_type = 'none';
- if (statusElem.classList.contains("noVNC_open")) {
- if (statusElem.classList.contains("noVNC_status_error")) {
- visible_status_type = 'error';
- } else if (statusElem.classList.contains("noVNC_status_warn")) {
- visible_status_type = 'warn';
- } else {
- visible_status_type = 'normal';
- }
- }
- if (visible_status_type === 'error' ||
- (visible_status_type === 'warn' && status_type === 'normal')) {
- return;
- }
-
- switch (status_type) {
- case 'error':
- statusElem.classList.remove("noVNC_status_warn");
- statusElem.classList.remove("noVNC_status_normal");
- statusElem.classList.add("noVNC_status_error");
- break;
- case 'warning':
- case 'warn':
- statusElem.classList.remove("noVNC_status_error");
- statusElem.classList.remove("noVNC_status_normal");
- statusElem.classList.add("noVNC_status_warn");
- break;
- case 'normal':
- case 'info':
- default:
- statusElem.classList.remove("noVNC_status_error");
- statusElem.classList.remove("noVNC_status_warn");
- statusElem.classList.add("noVNC_status_normal");
- break;
- }
-
- statusElem.textContent = text;
- statusElem.classList.add("noVNC_open");
-
- // If no time was specified, show the status for 1.5 seconds
- if (typeof time === 'undefined') {
- time = 1500;
- }
+const PAGE_TITLE = "noVNC";
- // Error messages do not timeout
- if (status_type !== 'error') {
- UI.statusTimeout = window.setTimeout(UI.hideStatus, time);
- }
- },
-
- hideStatus() {
- clearTimeout(UI.statusTimeout);
- document.getElementById('noVNC_status').classList.remove("noVNC_open");
- },
-
- activateControlbar(event) {
- clearTimeout(UI.idleControlbarTimeout);
- // We manipulate the anchor instead of the actual control
- // bar in order to avoid creating new a stacking group
- document.getElementById('noVNC_control_bar_anchor')
- .classList.remove("noVNC_idle");
- UI.idleControlbarTimeout = window.setTimeout(UI.idleControlbar, 2000);
- },
-
- idleControlbar() {
- document.getElementById('noVNC_control_bar_anchor')
- .classList.add("noVNC_idle");
- },
-
- keepControlbar() {
- clearTimeout(UI.closeControlbarTimeout);
- },
-
- openControlbar() {
- document.getElementById('noVNC_control_bar')
- .classList.add("noVNC_open");
- },
-
- closeControlbar() {
- UI.closeAllPanels();
- document.getElementById('noVNC_control_bar')
- .classList.remove("noVNC_open");
- },
-
- toggleControlbar() {
- if (document.getElementById('noVNC_control_bar')
- .classList.contains("noVNC_open")) {
- UI.closeControlbar();
- } else {
- UI.openControlbar();
- }
- },
-
- toggleControlbarSide() {
- // Temporarily disable animation, if bar is displayed, to avoid weird
- // movement. The transitionend-event will not fire when display=none.
- const bar = document.getElementById('noVNC_control_bar');
- const barDisplayStyle = window.getComputedStyle(bar).display;
- if (barDisplayStyle !== 'none') {
- bar.style.transitionDuration = '0s';
- bar.addEventListener('transitionend', () => bar.style.transitionDuration = '');
- }
-
- const anchor = document.getElementById('noVNC_control_bar_anchor');
- if (anchor.classList.contains("noVNC_right")) {
- WebUtil.writeSetting('controlbar_pos', 'left');
- anchor.classList.remove("noVNC_right");
- } else {
- WebUtil.writeSetting('controlbar_pos', 'right');
- anchor.classList.add("noVNC_right");
- }
-
- // Consider this a movement of the handle
- UI.controlbarDrag = true;
- },
-
- showControlbarHint(show) {
- const hint = document.getElementById('noVNC_control_bar_hint');
- if (show) {
- hint.classList.add("noVNC_active");
- } else {
- hint.classList.remove("noVNC_active");
- }
- },
-
- dragControlbarHandle(e) {
- if (!UI.controlbarGrabbed) return;
-
- const ptr = getPointerEvent(e);
-
- const anchor = document.getElementById('noVNC_control_bar_anchor');
- if (ptr.clientX < (window.innerWidth * 0.1)) {
- if (anchor.classList.contains("noVNC_right")) {
- UI.toggleControlbarSide();
- }
- } else if (ptr.clientX > (window.innerWidth * 0.9)) {
- if (!anchor.classList.contains("noVNC_right")) {
- UI.toggleControlbarSide();
- }
- }
-
- if (!UI.controlbarDrag) {
- const dragDistance = Math.abs(ptr.clientY - UI.controlbarMouseDownClientY);
-
- if (dragDistance < dragThreshold) return;
-
- UI.controlbarDrag = true;
- }
-
- const eventY = ptr.clientY - UI.controlbarMouseDownOffsetY;
-
- UI.moveControlbarHandle(eventY);
-
- e.preventDefault();
- e.stopPropagation();
- UI.keepControlbar();
- UI.activateControlbar();
- },
-
- // Move the handle but don't allow any position outside the bounds
- moveControlbarHandle(viewportRelativeY) {
- const handle = document.getElementById("noVNC_control_bar_handle");
- const handleHeight = handle.getBoundingClientRect().height;
- const controlbarBounds = document.getElementById("noVNC_control_bar")
- .getBoundingClientRect();
- const margin = 10;
-
- // These heights need to be non-zero for the below logic to work
- if (handleHeight === 0 || controlbarBounds.height === 0) {
- return;
- }
-
- let newY = viewportRelativeY;
-
- // Check if the coordinates are outside the control bar
- if (newY < controlbarBounds.top + margin) {
- // Force coordinates to be below the top of the control bar
- newY = controlbarBounds.top + margin;
-
- } else if (newY > controlbarBounds.top +
- controlbarBounds.height - handleHeight - margin) {
- // Force coordinates to be above the bottom of the control bar
- newY = controlbarBounds.top +
- controlbarBounds.height - handleHeight - margin;
- }
-
- // Corner case: control bar too small for stable position
- if (controlbarBounds.height < (handleHeight + margin * 2)) {
- newY = controlbarBounds.top +
- (controlbarBounds.height - handleHeight) / 2;
- }
-
- // The transform needs coordinates that are relative to the parent
- const parentRelativeY = newY - controlbarBounds.top;
- handle.style.transform = "translateY(" + parentRelativeY + "px)";
- },
-
- updateControlbarHandle() {
- // Since the control bar is fixed on the viewport and not the page,
- // the move function expects coordinates relative the the viewport.
- const handle = document.getElementById("noVNC_control_bar_handle");
- const handleBounds = handle.getBoundingClientRect();
- UI.moveControlbarHandle(handleBounds.top);
- },
-
- controlbarHandleMouseUp(e) {
- if ((e.type == "mouseup") && (e.button != 0)) return;
-
- // mouseup and mousedown on the same place toggles the controlbar
- if (UI.controlbarGrabbed && !UI.controlbarDrag) {
- UI.toggleControlbar();
- e.preventDefault();
- e.stopPropagation();
- UI.keepControlbar();
- UI.activateControlbar();
- }
- UI.controlbarGrabbed = false;
- UI.showControlbarHint(false);
- },
-
- controlbarHandleMouseDown(e) {
- if ((e.type == "mousedown") && (e.button != 0)) return;
-
- const ptr = getPointerEvent(e);
-
- const handle = document.getElementById("noVNC_control_bar_handle");
- const bounds = handle.getBoundingClientRect();
-
- // Touch events have implicit capture
- if (e.type === "mousedown") {
- setCapture(handle);
- }
-
- UI.controlbarGrabbed = true;
- UI.controlbarDrag = false;
-
- UI.showControlbarHint(true);
-
- UI.controlbarMouseDownClientY = ptr.clientY;
- UI.controlbarMouseDownOffsetY = ptr.clientY - bounds.top;
- e.preventDefault();
- e.stopPropagation();
- UI.keepControlbar();
- UI.activateControlbar();
- },
-
- toggleExpander(e) {
- if (this.classList.contains("noVNC_open")) {
- this.classList.remove("noVNC_open");
- } else {
- this.classList.add("noVNC_open");
- }
- },
-
- /* ------^-------
- * /VISUAL
- * ==============
- * SETTINGS
- * ------v------*/
-
- // Initial page load read/initialization of settings
- initSetting(name, defVal) {
- // Check Query string followed by cookie
- let val = WebUtil.getConfigVar(name);
- if (val === null) {
- val = WebUtil.readSetting(name, defVal);
- }
- WebUtil.setSetting(name, val);
- UI.updateSetting(name);
- return val;
- },
-
- // Set the new value, update and disable form control setting
- forceSetting(name, val) {
- WebUtil.setSetting(name, val);
- UI.updateSetting(name);
- UI.disableSetting(name);
- },
-
- // Update cookie and form control setting. If value is not set, then
- // updates from control to current cookie setting.
- updateSetting(name) {
-
- // Update the settings control
- let value = UI.getSetting(name);
-
- const ctrl = document.getElementById('noVNC_setting_' + name);
- if (ctrl.type === 'checkbox') {
- ctrl.checked = value;
-
- } else if (typeof ctrl.options !== 'undefined') {
- for (let i = 0; i < ctrl.options.length; i += 1) {
- if (ctrl.options[i].value === value) {
- ctrl.selectedIndex = i;
- break;
- }
- }
- } else {
- /*Weird IE9 error leads to 'null' appearring
+const UI = {
+ connected: false,
+ desktopName: "",
+
+ statusTimeout: null,
+ hideKeyboardTimeout: null,
+ idleControlbarTimeout: null,
+ closeControlbarTimeout: null,
+
+ controlbarGrabbed: false,
+ controlbarDrag: false,
+ controlbarMouseDownClientY: 0,
+ controlbarMouseDownOffsetY: 0,
+
+ lastKeyboardinput: null,
+ defaultKeyboardinputLen: 100,
+
+ inhibitReconnect: true,
+ reconnectCallback: null,
+ reconnectPassword: null,
+
+ prime() {
+ return WebUtil.initSettings().then(() => {
+ if (
+ document.readyState === "interactive" ||
+ document.readyState === "complete"
+ ) {
+ return UI.start();
+ }
+
+ return new Promise((resolve, reject) => {
+ document.addEventListener("DOMContentLoaded", () =>
+ UI.start().then(resolve).catch(reject)
+ );
+ });
+ });
+ },
+
+ // Render default UI and initialize settings menu
+ start() {
+ UI.initSettings();
+
+ // Translate the DOM
+ l10n.translateDOM();
+
+ WebUtil.fetchJSON("./package.json")
+ .then((packageInfo) => {
+ Array.from(document.getElementsByClassName("noVNC_version")).forEach(
+ (el) => (el.innerText = packageInfo.version)
+ );
+ })
+ .catch((err) => {
+ Log.Error("Couldn't fetch package.json: " + err);
+ Array.from(document.getElementsByClassName("noVNC_version_wrapper"))
+ .concat(
+ Array.from(
+ document.getElementsByClassName("noVNC_version_separator")
+ )
+ )
+ .forEach((el) => (el.style.display = "none"));
+ });
+
+ // Adapt the interface for touch screen devices
+ if (isTouchDevice) {
+ document.documentElement.classList.add("noVNC_touch");
+ // Remove the address bar
+ setTimeout(() => window.scrollTo(0, 1), 100);
+ }
+
+ // Restore control bar position
+ if (WebUtil.readSetting("controlbar_pos") === "right") {
+ UI.toggleControlbarSide();
+ }
+
+ UI.initFullscreen();
+
+ // Setup event handlers
+ UI.addControlbarHandlers();
+ UI.addTouchSpecificHandlers();
+ UI.addExtraKeysHandlers();
+ UI.addMachineHandlers();
+ UI.addConnectionControlHandlers();
+ UI.addClipboardHandlers();
+ UI.addSettingsHandlers();
+ document
+ .getElementById("noVNC_status")
+ .addEventListener("click", UI.hideStatus);
+
+ // Bootstrap fallback input handler
+ UI.keyboardinputReset();
+
+ UI.openControlbar();
+
+ UI.updateVisualState("init");
+
+ document.documentElement.classList.remove("noVNC_loading");
+
+ let autoconnect = WebUtil.getConfigVar("autoconnect", false);
+ if (autoconnect === "true" || autoconnect == "1") {
+ autoconnect = true;
+ UI.connect();
+ } else {
+ autoconnect = false;
+ // Show the connect panel on first load unless autoconnecting
+ UI.openConnectPanel();
+ }
+
+ return Promise.resolve(UI.rfb);
+ },
+
+ initFullscreen() {
+ // Only show the button if fullscreen is properly supported
+ // * Safari doesn't support alphanumerical input while in fullscreen
+ if (
+ !isSafari() &&
+ (document.documentElement.requestFullscreen ||
+ document.documentElement.mozRequestFullScreen ||
+ document.documentElement.webkitRequestFullscreen ||
+ document.body.msRequestFullscreen)
+ ) {
+ document
+ .getElementById("noVNC_fullscreen_button")
+ .classList.remove("noVNC_hidden");
+ UI.addFullscreenHandlers();
+ }
+ },
+
+ initSettings() {
+ // Logging selection dropdown
+ const llevels = ["error", "warn", "info", "debug"];
+ for (let i = 0; i < llevels.length; i += 1) {
+ UI.addOption(
+ document.getElementById("noVNC_setting_logging"),
+ llevels[i],
+ llevels[i]
+ );
+ }
+
+ // Settings with immediate effects
+ UI.initSetting("logging", "warn");
+ UI.updateLogging();
+
+ // if port == 80 (or 443) then it won't be present and should be
+ // set manually
+ let port = window.location.port;
+ if (!port) {
+ if (window.location.protocol.substring(0, 5) == "https") {
+ port = 443;
+ } else if (window.location.protocol.substring(0, 4) == "http") {
+ port = 80;
+ }
+ }
+
+ /* Populate the controls if defaults are provided in the URL */
+ UI.initSetting("host", window.location.hostname);
+ UI.initSetting("port", port);
+ UI.initSetting("encrypt", window.location.protocol === "https:");
+ UI.initSetting("view_clip", false);
+ UI.initSetting("resize", "off");
+ UI.initSetting("quality", 6);
+ UI.initSetting("compression", 2);
+ UI.initSetting("shared", true);
+ UI.initSetting("view_only", false);
+ UI.initSetting("show_dot", false);
+ UI.initSetting("path", "websockify");
+ UI.initSetting("repeaterID", "");
+ UI.initSetting("reconnect", false);
+ UI.initSetting("reconnect_delay", 5000);
+
+ // ######################## CUSTOM CODE ########################
+ UI.initSetting("resize", "remote");
+ UI.initSetting("reconnect", true);
+ // update settings cannot be used anymore to force settings
+ WebUtil.setSetting(
+ "path",
+ window.location.pathname
+ .substring(0, window.location.pathname.lastIndexOf("/"))
+ .replace(/^\//, "") + "/websockify"
+ );
+ // ######################## END CUSTOM CODE ########################
+
+ UI.setupSettingLabels();
+ },
+ // Adds a link to the label elements on the corresponding input elements
+ setupSettingLabels() {
+ const labels = document.getElementsByTagName("LABEL");
+ for (let i = 0; i < labels.length; i++) {
+ const htmlFor = labels[i].htmlFor;
+ if (htmlFor != "") {
+ const elem = document.getElementById(htmlFor);
+ if (elem) elem.label = labels[i];
+ } else {
+ // If 'for' isn't set, use the first input element child
+ const children = labels[i].children;
+ for (let j = 0; j < children.length; j++) {
+ if (children[j].form !== undefined) {
+ children[j].label = labels[i];
+ break;
+ }
+ }
+ }
+ }
+ },
+
+ /* ------^-------
+ * /INIT
+ * ==============
+ * EVENT HANDLERS
+ * ------v------*/
+
+ addControlbarHandlers() {
+ document
+ .getElementById("noVNC_control_bar")
+ .addEventListener("mousemove", UI.activateControlbar);
+ document
+ .getElementById("noVNC_control_bar")
+ .addEventListener("mouseup", UI.activateControlbar);
+ document
+ .getElementById("noVNC_control_bar")
+ .addEventListener("mousedown", UI.activateControlbar);
+ document
+ .getElementById("noVNC_control_bar")
+ .addEventListener("keydown", UI.activateControlbar);
+
+ document
+ .getElementById("noVNC_control_bar")
+ .addEventListener("mousedown", UI.keepControlbar);
+ document
+ .getElementById("noVNC_control_bar")
+ .addEventListener("keydown", UI.keepControlbar);
+
+ document
+ .getElementById("noVNC_view_drag_button")
+ .addEventListener("click", UI.toggleViewDrag);
+
+ document
+ .getElementById("noVNC_control_bar_handle")
+ .addEventListener("mousedown", UI.controlbarHandleMouseDown);
+ document
+ .getElementById("noVNC_control_bar_handle")
+ .addEventListener("mouseup", UI.controlbarHandleMouseUp);
+ document
+ .getElementById("noVNC_control_bar_handle")
+ .addEventListener("mousemove", UI.dragControlbarHandle);
+ // resize events aren't available for elements
+ window.addEventListener("resize", UI.updateControlbarHandle);
+
+ const exps = document.getElementsByClassName("noVNC_expander");
+ for (let i = 0; i < exps.length; i++) {
+ exps[i].addEventListener("click", UI.toggleExpander);
+ }
+ },
+
+ addTouchSpecificHandlers() {
+ document
+ .getElementById("noVNC_keyboard_button")
+ .addEventListener("click", UI.toggleVirtualKeyboard);
+
+ UI.touchKeyboard = new Keyboard(
+ document.getElementById("noVNC_keyboardinput")
+ );
+ UI.touchKeyboard.onkeyevent = UI.keyEvent;
+ UI.touchKeyboard.grab();
+ document
+ .getElementById("noVNC_keyboardinput")
+ .addEventListener("input", UI.keyInput);
+ document
+ .getElementById("noVNC_keyboardinput")
+ .addEventListener("focus", UI.onfocusVirtualKeyboard);
+ document
+ .getElementById("noVNC_keyboardinput")
+ .addEventListener("blur", UI.onblurVirtualKeyboard);
+ document
+ .getElementById("noVNC_keyboardinput")
+ .addEventListener("submit", () => false);
+
+ document.documentElement.addEventListener(
+ "mousedown",
+ UI.keepVirtualKeyboard,
+ true
+ );
+
+ document
+ .getElementById("noVNC_control_bar")
+ .addEventListener("touchstart", UI.activateControlbar);
+ document
+ .getElementById("noVNC_control_bar")
+ .addEventListener("touchmove", UI.activateControlbar);
+ document
+ .getElementById("noVNC_control_bar")
+ .addEventListener("touchend", UI.activateControlbar);
+ document
+ .getElementById("noVNC_control_bar")
+ .addEventListener("input", UI.activateControlbar);
+
+ document
+ .getElementById("noVNC_control_bar")
+ .addEventListener("touchstart", UI.keepControlbar);
+ document
+ .getElementById("noVNC_control_bar")
+ .addEventListener("input", UI.keepControlbar);
+
+ document
+ .getElementById("noVNC_control_bar_handle")
+ .addEventListener("touchstart", UI.controlbarHandleMouseDown);
+ document
+ .getElementById("noVNC_control_bar_handle")
+ .addEventListener("touchend", UI.controlbarHandleMouseUp);
+ document
+ .getElementById("noVNC_control_bar_handle")
+ .addEventListener("touchmove", UI.dragControlbarHandle);
+ },
+
+ addExtraKeysHandlers() {
+ document
+ .getElementById("noVNC_toggle_extra_keys_button")
+ .addEventListener("click", UI.toggleExtraKeys);
+ document
+ .getElementById("noVNC_toggle_ctrl_button")
+ .addEventListener("click", UI.toggleCtrl);
+ document
+ .getElementById("noVNC_toggle_windows_button")
+ .addEventListener("click", UI.toggleWindows);
+ document
+ .getElementById("noVNC_toggle_alt_button")
+ .addEventListener("click", UI.toggleAlt);
+ document
+ .getElementById("noVNC_send_tab_button")
+ .addEventListener("click", UI.sendTab);
+ document
+ .getElementById("noVNC_send_esc_button")
+ .addEventListener("click", UI.sendEsc);
+ document
+ .getElementById("noVNC_send_ctrl_alt_del_button")
+ .addEventListener("click", UI.sendCtrlAltDel);
+ },
+
+ addMachineHandlers() {
+ document
+ .getElementById("noVNC_shutdown_button")
+ .addEventListener("click", () => UI.rfb.machineShutdown());
+ document
+ .getElementById("noVNC_reboot_button")
+ .addEventListener("click", () => UI.rfb.machineReboot());
+ document
+ .getElementById("noVNC_reset_button")
+ .addEventListener("click", () => UI.rfb.machineReset());
+ document
+ .getElementById("noVNC_power_button")
+ .addEventListener("click", UI.togglePowerPanel);
+ },
+
+ addConnectionControlHandlers() {
+ document
+ .getElementById("noVNC_disconnect_button")
+ .addEventListener("click", UI.disconnect);
+ document
+ .getElementById("noVNC_connect_button")
+ .addEventListener("click", UI.connect);
+ document
+ .getElementById("noVNC_cancel_reconnect_button")
+ .addEventListener("click", UI.cancelReconnect);
+
+ document
+ .getElementById("noVNC_credentials_button")
+ .addEventListener("click", UI.setCredentials);
+ },
+
+ addClipboardHandlers() {
+ document
+ .getElementById("noVNC_clipboard_button")
+ .addEventListener("click", UI.toggleClipboardPanel);
+ document
+ .getElementById("noVNC_clipboard_text")
+ .addEventListener("change", UI.clipboardSend);
+ document
+ .getElementById("noVNC_clipboard_clear_button")
+ .addEventListener("click", UI.clipboardClear);
+ },
+
+ // Add a call to save settings when the element changes,
+ // unless the optional parameter changeFunc is used instead.
+ addSettingChangeHandler(name, changeFunc) {
+ const settingElem = document.getElementById("noVNC_setting_" + name);
+ if (changeFunc === undefined) {
+ changeFunc = () => UI.saveSetting(name);
+ }
+ settingElem.addEventListener("change", changeFunc);
+ },
+
+ addSettingsHandlers() {
+ document
+ .getElementById("noVNC_settings_button")
+ .addEventListener("click", UI.toggleSettingsPanel);
+
+ UI.addSettingChangeHandler("encrypt");
+ UI.addSettingChangeHandler("resize");
+ UI.addSettingChangeHandler("resize", UI.applyResizeMode);
+ UI.addSettingChangeHandler("resize", UI.updateViewClip);
+ UI.addSettingChangeHandler("quality");
+ UI.addSettingChangeHandler("quality", UI.updateQuality);
+ UI.addSettingChangeHandler("compression");
+ UI.addSettingChangeHandler("compression", UI.updateCompression);
+ UI.addSettingChangeHandler("view_clip");
+ UI.addSettingChangeHandler("view_clip", UI.updateViewClip);
+ UI.addSettingChangeHandler("shared");
+ UI.addSettingChangeHandler("view_only");
+ UI.addSettingChangeHandler("view_only", UI.updateViewOnly);
+ UI.addSettingChangeHandler("show_dot");
+ UI.addSettingChangeHandler("show_dot", UI.updateShowDotCursor);
+ UI.addSettingChangeHandler("host");
+ UI.addSettingChangeHandler("port");
+ UI.addSettingChangeHandler("path");
+ UI.addSettingChangeHandler("repeaterID");
+ UI.addSettingChangeHandler("logging");
+ UI.addSettingChangeHandler("logging", UI.updateLogging);
+ UI.addSettingChangeHandler("reconnect");
+ UI.addSettingChangeHandler("reconnect_delay");
+ },
+
+ addFullscreenHandlers() {
+ document
+ .getElementById("noVNC_fullscreen_button")
+ .addEventListener("click", UI.toggleFullscreen);
+
+ window.addEventListener("fullscreenchange", UI.updateFullscreenButton);
+ window.addEventListener("mozfullscreenchange", UI.updateFullscreenButton);
+ window.addEventListener(
+ "webkitfullscreenchange",
+ UI.updateFullscreenButton
+ );
+ window.addEventListener("msfullscreenchange", UI.updateFullscreenButton);
+ },
+
+ /* ------^-------
+ * /EVENT HANDLERS
+ * ==============
+ * VISUAL
+ * ------v------*/
+
+ // Disable/enable controls depending on connection state
+ updateVisualState(state) {
+ document.documentElement.classList.remove("noVNC_connecting");
+ document.documentElement.classList.remove("noVNC_connected");
+ document.documentElement.classList.remove("noVNC_disconnecting");
+ document.documentElement.classList.remove("noVNC_reconnecting");
+
+ const transitionElem = document.getElementById("noVNC_transition_text");
+ switch (state) {
+ case "init":
+ break;
+ case "connecting":
+ transitionElem.textContent = _("Connecting...");
+ document.documentElement.classList.add("noVNC_connecting");
+ break;
+ case "connected":
+ document.documentElement.classList.add("noVNC_connected");
+ break;
+ case "disconnecting":
+ transitionElem.textContent = _("Disconnecting...");
+ document.documentElement.classList.add("noVNC_disconnecting");
+ break;
+ case "disconnected":
+ break;
+ case "reconnecting":
+ transitionElem.textContent = _("Reconnecting...");
+ document.documentElement.classList.add("noVNC_reconnecting");
+ break;
+ default:
+ Log.Error("Invalid visual state: " + state);
+ UI.showStatus(_("Internal error"), "error");
+ return;
+ }
+
+ if (UI.connected) {
+ UI.updateViewClip();
+
+ UI.disableSetting("encrypt");
+ UI.disableSetting("shared");
+ UI.disableSetting("host");
+ UI.disableSetting("port");
+ UI.disableSetting("path");
+ UI.disableSetting("repeaterID");
+
+ // Hide the controlbar after 2 seconds
+ UI.closeControlbarTimeout = setTimeout(UI.closeControlbar, 2000);
+ } else {
+ UI.enableSetting("encrypt");
+ UI.enableSetting("shared");
+ UI.enableSetting("host");
+ UI.enableSetting("port");
+ UI.enableSetting("path");
+ UI.enableSetting("repeaterID");
+ UI.updatePowerButton();
+ UI.keepControlbar();
+ }
+
+ // State change closes dialogs as they may not be relevant
+ // anymore
+ UI.closeAllPanels();
+ document
+ .getElementById("noVNC_credentials_dlg")
+ .classList.remove("noVNC_open");
+ },
+
+ showStatus(text, statusType, time) {
+ const statusElem = document.getElementById("noVNC_status");
+
+ if (typeof statusType === "undefined") {
+ statusType = "normal";
+ }
+
+ // Don't overwrite more severe visible statuses and never
+ // errors. Only shows the first error.
+ if (statusElem.classList.contains("noVNC_open")) {
+ if (statusElem.classList.contains("noVNC_status_error")) {
+ return;
+ }
+ if (
+ statusElem.classList.contains("noVNC_status_warn") &&
+ statusType === "normal"
+ ) {
+ return;
+ }
+ }
+
+ clearTimeout(UI.statusTimeout);
+
+ switch (statusType) {
+ case "error":
+ statusElem.classList.remove("noVNC_status_warn");
+ statusElem.classList.remove("noVNC_status_normal");
+ statusElem.classList.add("noVNC_status_error");
+ break;
+ case "warning":
+ case "warn":
+ statusElem.classList.remove("noVNC_status_error");
+ statusElem.classList.remove("noVNC_status_normal");
+ statusElem.classList.add("noVNC_status_warn");
+ break;
+ case "normal":
+ case "info":
+ default:
+ statusElem.classList.remove("noVNC_status_error");
+ statusElem.classList.remove("noVNC_status_warn");
+ statusElem.classList.add("noVNC_status_normal");
+ break;
+ }
+
+ statusElem.textContent = text;
+ statusElem.classList.add("noVNC_open");
+
+ // If no time was specified, show the status for 1.5 seconds
+ if (typeof time === "undefined") {
+ time = 1500;
+ }
+
+ // Error messages do not timeout
+ if (statusType !== "error") {
+ UI.statusTimeout = window.setTimeout(UI.hideStatus, time);
+ }
+ },
+
+ hideStatus() {
+ clearTimeout(UI.statusTimeout);
+ document.getElementById("noVNC_status").classList.remove("noVNC_open");
+ },
+
+ activateControlbar(event) {
+ clearTimeout(UI.idleControlbarTimeout);
+ // We manipulate the anchor instead of the actual control
+    // bar in order to avoid creating a new stacking group
+ document
+ .getElementById("noVNC_control_bar_anchor")
+ .classList.remove("noVNC_idle");
+ UI.idleControlbarTimeout = window.setTimeout(UI.idleControlbar, 2000);
+ },
+
+ idleControlbar() {
+ // Don't fade if a child of the control bar has focus
+ if (
+ document
+ .getElementById("noVNC_control_bar")
+ .contains(document.activeElement) &&
+ document.hasFocus()
+ ) {
+ UI.activateControlbar();
+ return;
+ }
+
+ document
+ .getElementById("noVNC_control_bar_anchor")
+ .classList.add("noVNC_idle");
+ },
+
+ keepControlbar() {
+ clearTimeout(UI.closeControlbarTimeout);
+ },
+
+ openControlbar() {
+ document.getElementById("noVNC_control_bar").classList.add("noVNC_open");
+ },
+
+ closeControlbar() {
+ UI.closeAllPanels();
+ document.getElementById("noVNC_control_bar").classList.remove("noVNC_open");
+ UI.rfb.focus();
+ },
+
+ toggleControlbar() {
+ if (
+ document
+ .getElementById("noVNC_control_bar")
+ .classList.contains("noVNC_open")
+ ) {
+ UI.closeControlbar();
+ } else {
+ UI.openControlbar();
+ }
+ },
+
+ toggleControlbarSide() {
+ // Temporarily disable animation, if bar is displayed, to avoid weird
+ // movement. The transitionend-event will not fire when display=none.
+ const bar = document.getElementById("noVNC_control_bar");
+ const barDisplayStyle = window.getComputedStyle(bar).display;
+ if (barDisplayStyle !== "none") {
+ bar.style.transitionDuration = "0s";
+ bar.addEventListener(
+ "transitionend",
+ () => (bar.style.transitionDuration = "")
+ );
+ }
+
+ const anchor = document.getElementById("noVNC_control_bar_anchor");
+ if (anchor.classList.contains("noVNC_right")) {
+ WebUtil.writeSetting("controlbar_pos", "left");
+ anchor.classList.remove("noVNC_right");
+ } else {
+ WebUtil.writeSetting("controlbar_pos", "right");
+ anchor.classList.add("noVNC_right");
+ }
+
+ // Consider this a movement of the handle
+ UI.controlbarDrag = true;
+ },
+
+ showControlbarHint(show) {
+ const hint = document.getElementById("noVNC_control_bar_hint");
+ if (show) {
+ hint.classList.add("noVNC_active");
+ } else {
+ hint.classList.remove("noVNC_active");
+ }
+ },
+
+ dragControlbarHandle(e) {
+ if (!UI.controlbarGrabbed) return;
+
+ const ptr = getPointerEvent(e);
+
+ const anchor = document.getElementById("noVNC_control_bar_anchor");
+ if (ptr.clientX < window.innerWidth * 0.1) {
+ if (anchor.classList.contains("noVNC_right")) {
+ UI.toggleControlbarSide();
+ }
+ } else if (ptr.clientX > window.innerWidth * 0.9) {
+ if (!anchor.classList.contains("noVNC_right")) {
+ UI.toggleControlbarSide();
+ }
+ }
+
+ if (!UI.controlbarDrag) {
+ const dragDistance = Math.abs(
+ ptr.clientY - UI.controlbarMouseDownClientY
+ );
+
+ if (dragDistance < dragThreshold) return;
+
+ UI.controlbarDrag = true;
+ }
+
+ const eventY = ptr.clientY - UI.controlbarMouseDownOffsetY;
+
+ UI.moveControlbarHandle(eventY);
+
+ e.preventDefault();
+ e.stopPropagation();
+ UI.keepControlbar();
+ UI.activateControlbar();
+ },
+
+ // Move the handle but don't allow any position outside the bounds
+ moveControlbarHandle(viewportRelativeY) {
+ const handle = document.getElementById("noVNC_control_bar_handle");
+ const handleHeight = handle.getBoundingClientRect().height;
+ const controlbarBounds = document
+ .getElementById("noVNC_control_bar")
+ .getBoundingClientRect();
+ const margin = 10;
+
+ // These heights need to be non-zero for the below logic to work
+ if (handleHeight === 0 || controlbarBounds.height === 0) {
+ return;
+ }
+
+ let newY = viewportRelativeY;
+
+ // Check if the coordinates are outside the control bar
+ if (newY < controlbarBounds.top + margin) {
+ // Force coordinates to be below the top of the control bar
+ newY = controlbarBounds.top + margin;
+ } else if (
+ newY >
+ controlbarBounds.top + controlbarBounds.height - handleHeight - margin
+ ) {
+ // Force coordinates to be above the bottom of the control bar
+ newY =
+ controlbarBounds.top + controlbarBounds.height - handleHeight - margin;
+ }
+
+ // Corner case: control bar too small for stable position
+ if (controlbarBounds.height < handleHeight + margin * 2) {
+ newY =
+ controlbarBounds.top + (controlbarBounds.height - handleHeight) / 2;
+ }
+
+ // The transform needs coordinates that are relative to the parent
+ const parentRelativeY = newY - controlbarBounds.top;
+ handle.style.transform = "translateY(" + parentRelativeY + "px)";
+ },
+
+ updateControlbarHandle() {
+ // Since the control bar is fixed on the viewport and not the page,
+ // the move function expects coordinates relative the the viewport.
+ const handle = document.getElementById("noVNC_control_bar_handle");
+ const handleBounds = handle.getBoundingClientRect();
+ UI.moveControlbarHandle(handleBounds.top);
+ },
+
+ controlbarHandleMouseUp(e) {
+ if (e.type == "mouseup" && e.button != 0) return;
+
+ // mouseup and mousedown on the same place toggles the controlbar
+ if (UI.controlbarGrabbed && !UI.controlbarDrag) {
+ UI.toggleControlbar();
+ e.preventDefault();
+ e.stopPropagation();
+ UI.keepControlbar();
+ UI.activateControlbar();
+ }
+ UI.controlbarGrabbed = false;
+ UI.showControlbarHint(false);
+ },
+
+ controlbarHandleMouseDown(e) {
+ if (e.type == "mousedown" && e.button != 0) return;
+
+ const ptr = getPointerEvent(e);
+
+ const handle = document.getElementById("noVNC_control_bar_handle");
+ const bounds = handle.getBoundingClientRect();
+
+ // Touch events have implicit capture
+ if (e.type === "mousedown") {
+ setCapture(handle);
+ }
+
+ UI.controlbarGrabbed = true;
+ UI.controlbarDrag = false;
+
+ UI.showControlbarHint(true);
+
+ UI.controlbarMouseDownClientY = ptr.clientY;
+ UI.controlbarMouseDownOffsetY = ptr.clientY - bounds.top;
+ e.preventDefault();
+ e.stopPropagation();
+ UI.keepControlbar();
+ UI.activateControlbar();
+ },
+
+ toggleExpander(e) {
+ if (this.classList.contains("noVNC_open")) {
+ this.classList.remove("noVNC_open");
+ } else {
+ this.classList.add("noVNC_open");
+ }
+ },
+
+ /* ------^-------
+ * /VISUAL
+ * ==============
+ * SETTINGS
+ * ------v------*/
+
+ // Initial page load read/initialization of settings
+ initSetting(name, defVal) {
+ // Check Query string followed by cookie
+ let val = WebUtil.getConfigVar(name);
+ if (val === null) {
+ val = WebUtil.readSetting(name, defVal);
+ }
+ WebUtil.setSetting(name, val);
+ UI.updateSetting(name);
+ return val;
+ },
+
+ // Set the new value, update and disable form control setting
+ forceSetting(name, val) {
+ WebUtil.setSetting(name, val);
+ UI.updateSetting(name);
+ UI.disableSetting(name);
+ },
+
+ // Update cookie and form control setting. If value is not set, then
+ // updates from control to current cookie setting.
+ updateSetting(name) {
+ // Update the settings control
+ let value = UI.getSetting(name);
+
+ const ctrl = document.getElementById("noVNC_setting_" + name);
+ if (ctrl.type === "checkbox") {
+ ctrl.checked = value;
+ } else if (typeof ctrl.options !== "undefined") {
+ for (let i = 0; i < ctrl.options.length; i += 1) {
+ if (ctrl.options[i].value === value) {
+ ctrl.selectedIndex = i;
+ break;
+ }
+ }
+ } else {
+      /*Weird IE9 error leads to 'null' appearing
in textboxes instead of ''.*/
- if (value === null) {
- value = "";
- }
- ctrl.value = value;
- }
- },
-
- // Save control setting to cookie
- saveSetting(name) {
- const ctrl = document.getElementById('noVNC_setting_' + name);
- let val;
- if (ctrl.type === 'checkbox') {
- val = ctrl.checked;
- } else if (typeof ctrl.options !== 'undefined') {
- val = ctrl.options[ctrl.selectedIndex].value;
- } else {
- val = ctrl.value;
- }
- WebUtil.writeSetting(name, val);
- //Log.Debug("Setting saved '" + name + "=" + val + "'");
- return val;
- },
-
- // Read form control compatible setting from cookie
- getSetting(name) {
- const ctrl = document.getElementById('noVNC_setting_' + name);
- let val = WebUtil.readSetting(name);
- if (typeof val !== 'undefined' && val !== null && ctrl.type === 'checkbox') {
- if (val.toString().toLowerCase() in {
- '0': 1,
- 'no': 1,
- 'false': 1
- }) {
- val = false;
- } else {
- val = true;
- }
- }
- return val;
- },
-
- // These helpers compensate for the lack of parent-selectors and
- // previous-sibling-selectors in CSS which are needed when we want to
- // disable the labels that belong to disabled input elements.
- disableSetting(name) {
- const ctrl = document.getElementById('noVNC_setting_' + name);
- ctrl.disabled = true;
- ctrl.label.classList.add('noVNC_disabled');
- },
-
- enableSetting(name) {
- const ctrl = document.getElementById('noVNC_setting_' + name);
- ctrl.disabled = false;
- ctrl.label.classList.remove('noVNC_disabled');
- },
-
- /* ------^-------
- * /SETTINGS
- * ==============
- * PANELS
- * ------v------*/
-
- closeAllPanels() {
- UI.closeSettingsPanel();
- UI.closePowerPanel();
- UI.closeClipboardPanel();
- UI.closeExtraKeys();
- },
-
- /* ------^-------
- * /PANELS
- * ==============
- * SETTINGS (panel)
- * ------v------*/
-
- openSettingsPanel() {
- UI.closeAllPanels();
- UI.openControlbar();
-
- // Refresh UI elements from saved cookies
- UI.updateSetting('encrypt');
- UI.updateSetting('view_clip');
- UI.updateSetting('resize');
- UI.updateSetting('shared');
- UI.updateSetting('view_only');
- UI.updateSetting('path');
- UI.updateSetting('repeaterID');
- UI.updateSetting('logging');
- UI.updateSetting('reconnect');
- UI.updateSetting('reconnect_delay');
-
- document.getElementById('noVNC_settings')
- .classList.add("noVNC_open");
- document.getElementById('noVNC_settings_button')
- .classList.add("noVNC_selected");
- },
-
- closeSettingsPanel() {
- document.getElementById('noVNC_settings')
- .classList.remove("noVNC_open");
- document.getElementById('noVNC_settings_button')
- .classList.remove("noVNC_selected");
- },
-
- toggleSettingsPanel() {
- if (document.getElementById('noVNC_settings')
- .classList.contains("noVNC_open")) {
- UI.closeSettingsPanel();
- } else {
- UI.openSettingsPanel();
- }
- },
-
- /* ------^-------
- * /SETTINGS
- * ==============
- * POWER
- * ------v------*/
-
- openPowerPanel() {
- UI.closeAllPanels();
- UI.openControlbar();
-
- document.getElementById('noVNC_power')
- .classList.add("noVNC_open");
- document.getElementById('noVNC_power_button')
- .classList.add("noVNC_selected");
- },
-
- closePowerPanel() {
- document.getElementById('noVNC_power')
- .classList.remove("noVNC_open");
- document.getElementById('noVNC_power_button')
- .classList.remove("noVNC_selected");
- },
-
- togglePowerPanel() {
- if (document.getElementById('noVNC_power')
- .classList.contains("noVNC_open")) {
- UI.closePowerPanel();
- } else {
- UI.openPowerPanel();
- }
- },
-
- // Disable/enable power button
- updatePowerButton() {
- if (UI.connected &&
- UI.rfb.capabilities.power &&
- !UI.rfb.viewOnly) {
- document.getElementById('noVNC_power_button')
- .classList.remove("noVNC_hidden");
- } else {
- document.getElementById('noVNC_power_button')
- .classList.add("noVNC_hidden");
- // Close power panel if open
- UI.closePowerPanel();
- }
- },
-
- /* ------^-------
- * /POWER
- * ==============
- * CLIPBOARD
- * ------v------*/
-
- openClipboardPanel() {
- UI.closeAllPanels();
- UI.openControlbar();
-
- document.getElementById('noVNC_clipboard')
- .classList.add("noVNC_open");
- document.getElementById('noVNC_clipboard_button')
- .classList.add("noVNC_selected");
- },
-
- closeClipboardPanel() {
- document.getElementById('noVNC_clipboard')
- .classList.remove("noVNC_open");
- document.getElementById('noVNC_clipboard_button')
- .classList.remove("noVNC_selected");
- },
-
- toggleClipboardPanel() {
- if (document.getElementById('noVNC_clipboard')
- .classList.contains("noVNC_open")) {
- UI.closeClipboardPanel();
- } else {
- UI.openClipboardPanel();
- }
- },
-
- clipboardReceive(e) {
- Log.Debug(">> UI.clipboardReceive: " + e.detail.text.substr(0, 40) + "...");
- document.getElementById('noVNC_clipboard_text').value = e.detail.text;
- Log.Debug("<< UI.clipboardReceive");
- },
-
- clipboardClear() {
- document.getElementById('noVNC_clipboard_text').value = "";
- UI.rfb.clipboardPasteFrom("");
- },
-
- clipboardSend() {
- const text = document.getElementById('noVNC_clipboard_text').value;
- Log.Debug(">> UI.clipboardSend: " + text.substr(0, 40) + "...");
- UI.rfb.clipboardPasteFrom(text);
- Log.Debug("<< UI.clipboardSend");
- },
-
- /* ------^-------
- * /CLIPBOARD
- * ==============
- * CONNECTION
- * ------v------*/
-
- openConnectPanel() {
- document.getElementById('noVNC_connect_dlg')
- .classList.add("noVNC_open");
- },
-
- closeConnectPanel() {
- document.getElementById('noVNC_connect_dlg')
- .classList.remove("noVNC_open");
- },
-
- connect(event, password) {
-
- // Ignore when rfb already exists
- if (typeof UI.rfb !== 'undefined') {
- return;
- }
-
- const host = UI.getSetting('host');
- const port = UI.getSetting('port');
- const path = UI.getSetting('path');
-
- if (typeof password === 'undefined') {
- password = WebUtil.getConfigVar('password');
- UI.reconnect_password = password;
- }
-
- if (password === null) {
- password = undefined;
- }
-
- UI.hideStatus();
-
- if (!host) {
- Log.Error("Can't connect when host is: " + host);
- UI.showStatus(_("Must set host"), 'error');
- return;
- }
-
- UI.closeAllPanels();
- UI.closeConnectPanel();
-
- UI.updateVisualState('connecting');
-
- let url;
-
- url = UI.getSetting('encrypt') ? 'wss' : 'ws';
-
- url += '://' + host;
- if (port) {
- url += ':' + port;
- }
- url += '/' + path;
-
- UI.rfb = new RFB(document.getElementById('noVNC_container'), url, {
- shared: UI.getSetting('shared'),
- showDotCursor: UI.getSetting('show_dot'),
- repeaterID: UI.getSetting('repeaterID'),
- credentials: {
- password: password
- }
- });
- UI.rfb.addEventListener("connect", UI.connectFinished);
- UI.rfb.addEventListener("disconnect", UI.disconnectFinished);
- UI.rfb.addEventListener("credentialsrequired", UI.credentials);
- UI.rfb.addEventListener("securityfailure", UI.securityFailed);
- UI.rfb.addEventListener("capabilities", UI.updatePowerButton);
- UI.rfb.addEventListener("clipboard", UI.clipboardReceive);
- UI.rfb.addEventListener("bell", UI.bell);
- UI.rfb.addEventListener("desktopname", UI.updateDesktopName);
- UI.rfb.clipViewport = UI.getSetting('view_clip');
- UI.rfb.scaleViewport = UI.getSetting('resize') === 'scale';
- UI.rfb.resizeSession = UI.getSetting('resize') === 'remote';
-
- UI.updateViewOnly(); // requires UI.rfb
-
- // ######################## CUSTOM CODE ########################
- var clipboard = document.getElementById('clipboard');
- var controlBar = document.getElementById("noVNC_control_bar");
- var controlBarClipboard = document.getElementById("noVNC_clipboard_text")
-
- UI.rfb.addEventListener('clipboard', function (e) {
- clipboard.value = e.detail.text;
- });
-
- UI.rfb.addEventListener('connect', function (e) {
- var viewer = document.getElementsByTagName('canvas')[0];
-
- document.body.addEventListener('paste', function (e) {
- try {
- console.log("paste");
- if (controlBarClipboard.classList.contains("noVNC_open")) {
- return;
- }
- if (UI.rfb != null) {
- text = e.clipboardData.getData('text');
- console.log("copy clipboard from local to vnc: " + text);
- controlBarClipboard.value = text;
- UI.rfb.clipboardPasteFrom(text);
- }
- } catch (error) {
- console.error(error);
- }
- });
-
- document.body.addEventListener('keydown', function (e) {
- try {
- console.log("document keydown: " + e.keyCode);
- if (controlBar.classList.contains("noVNC_open")) {
- return;
- }
- if (e.keyCode !== 86) {
- e.preventDefault();
- } else {
- if (navigator.clipboard) {
- navigator.clipboard.readText()
- .then(text => {
- console.log("copy clipboard from local to vnc: " + text);
- controlBarClipboard.value = text;
- UI.rfb.clipboardPasteFrom(text);
- })
- .catch(err => {
- console.error('Failed to read clipboard contents: ', err);
- });
- }
- }
- setTimeout(function () {
- viewer.dispatchEvent(new e.constructor(e.type, e));
- if (UI.rfb != null) {
- UI.rfb.focus();
- }
- }, 1);
- } catch (error) {
- console.error(error);
- }
- });
-
- document.body.addEventListener('keyup', function (e) {
-
- try {
- console.log("document keyup");
- if (controlBar.classList.contains("noVNC_open")) {
- return;
- }
-
- if (e.keyCode !== 86) {
- e.preventDefault();
- }
- setTimeout(function () {
- viewer.dispatchEvent(new e.constructor(e.type, e));
- if (UI.rfb != null) {
- UI.rfb.focus();
- }
- }, 1);
- } catch (error) {
- console.error(error);
- }
- });
-
- viewer.addEventListener('keydown', function (e) {
- try {
- console.log("viewer keydown");
- if (e.ctrlKey) {
- if (controlBar.classList.contains("noVNC_open")) {
- return;
- }
- document.activeElement.blur();
- }
- } catch (error) {
- console.error(error);
- }
- });
-
- viewer.addEventListener('keyup', function (e) {
-
- try {
- console.log("viewer keyup: " + e.keyCode)
- if (e.ctrlKey && [67, 88].includes(e.keyCode)) {
- if (controlBar.classList.contains("noVNC_open")) {
- return;
- }
- if (clipboard != null) {
- console.log("copy clipboard from vnc to local: " + clipboard.value);
- clipboard.focus();
- clipboard.select();
-
- document.execCommand('copy');
- if (UI.rfb != null) {
- UI.rfb.focus();
- }
- }
-
- }
- } catch (error) {
- console.error(error);
- }
- });
- });
- // ######################## END CUSTOM CODE ########################
- },
-
- disconnect() {
- UI.closeAllPanels();
- UI.rfb.disconnect();
-
- UI.connected = false;
-
- // Disable automatic reconnecting
- UI.inhibit_reconnect = true;
-
- UI.updateVisualState('disconnecting');
-
- // Don't display the connection settings until we're actually disconnected
- },
-
- reconnect() {
- UI.reconnect_callback = null;
-
- // if reconnect has been disabled in the meantime, do nothing.
- if (UI.inhibit_reconnect) {
+ if (value === null) {
+ value = "";
+ }
+ ctrl.value = value;
+ }
+ },
+
+ // Save control setting to cookie
+ saveSetting(name) {
+ const ctrl = document.getElementById("noVNC_setting_" + name);
+ let val;
+ if (ctrl.type === "checkbox") {
+ val = ctrl.checked;
+ } else if (typeof ctrl.options !== "undefined") {
+ val = ctrl.options[ctrl.selectedIndex].value;
+ } else {
+ val = ctrl.value;
+ }
+ WebUtil.writeSetting(name, val);
+ //Log.Debug("Setting saved '" + name + "=" + val + "'");
+ return val;
+ },
+
+ // Read form control compatible setting from cookie
+ getSetting(name) {
+ const ctrl = document.getElementById("noVNC_setting_" + name);
+ let val = WebUtil.readSetting(name);
+ if (
+ typeof val !== "undefined" &&
+ val !== null &&
+ ctrl.type === "checkbox"
+ ) {
+ if (val.toString().toLowerCase() in { 0: 1, no: 1, false: 1 }) {
+ val = false;
+ } else {
+ val = true;
+ }
+ }
+ return val;
+ },
+
+ // These helpers compensate for the lack of parent-selectors and
+ // previous-sibling-selectors in CSS which are needed when we want to
+ // disable the labels that belong to disabled input elements.
+ disableSetting(name) {
+ const ctrl = document.getElementById("noVNC_setting_" + name);
+ ctrl.disabled = true;
+ ctrl.label.classList.add("noVNC_disabled");
+ },
+
+ enableSetting(name) {
+ const ctrl = document.getElementById("noVNC_setting_" + name);
+ ctrl.disabled = false;
+ ctrl.label.classList.remove("noVNC_disabled");
+ },
+
+ /* ------^-------
+ * /SETTINGS
+ * ==============
+ * PANELS
+ * ------v------*/
+
+ closeAllPanels() {
+ UI.closeSettingsPanel();
+ UI.closePowerPanel();
+ UI.closeClipboardPanel();
+ UI.closeExtraKeys();
+ },
+
+ /* ------^-------
+ * /PANELS
+ * ==============
+ * SETTINGS (panel)
+ * ------v------*/
+
+ openSettingsPanel() {
+ UI.closeAllPanels();
+ UI.openControlbar();
+
+ // Refresh UI elements from saved cookies
+ UI.updateSetting("encrypt");
+ UI.updateSetting("view_clip");
+ UI.updateSetting("resize");
+ UI.updateSetting("quality");
+ UI.updateSetting("compression");
+ UI.updateSetting("shared");
+ UI.updateSetting("view_only");
+ UI.updateSetting("path");
+ UI.updateSetting("repeaterID");
+ UI.updateSetting("logging");
+ UI.updateSetting("reconnect");
+ UI.updateSetting("reconnect_delay");
+
+ document.getElementById("noVNC_settings").classList.add("noVNC_open");
+ document
+ .getElementById("noVNC_settings_button")
+ .classList.add("noVNC_selected");
+ },
+
+ closeSettingsPanel() {
+ document.getElementById("noVNC_settings").classList.remove("noVNC_open");
+ document
+ .getElementById("noVNC_settings_button")
+ .classList.remove("noVNC_selected");
+ },
+
+ toggleSettingsPanel() {
+ if (
+ document.getElementById("noVNC_settings").classList.contains("noVNC_open")
+ ) {
+ UI.closeSettingsPanel();
+ } else {
+ UI.openSettingsPanel();
+ }
+ },
+
+ /* ------^-------
+ * /SETTINGS
+ * ==============
+ * POWER
+ * ------v------*/
+
+ openPowerPanel() {
+ UI.closeAllPanels();
+ UI.openControlbar();
+
+ document.getElementById("noVNC_power").classList.add("noVNC_open");
+ document
+ .getElementById("noVNC_power_button")
+ .classList.add("noVNC_selected");
+ },
+
+ closePowerPanel() {
+ document.getElementById("noVNC_power").classList.remove("noVNC_open");
+ document
+ .getElementById("noVNC_power_button")
+ .classList.remove("noVNC_selected");
+ },
+
+ togglePowerPanel() {
+ if (
+ document.getElementById("noVNC_power").classList.contains("noVNC_open")
+ ) {
+ UI.closePowerPanel();
+ } else {
+ UI.openPowerPanel();
+ }
+ },
+
+ // Disable/enable power button
+ updatePowerButton() {
+ if (UI.connected && UI.rfb.capabilities.power && !UI.rfb.viewOnly) {
+ document
+ .getElementById("noVNC_power_button")
+ .classList.remove("noVNC_hidden");
+ } else {
+ document
+ .getElementById("noVNC_power_button")
+ .classList.add("noVNC_hidden");
+ // Close power panel if open
+ UI.closePowerPanel();
+ }
+ },
+
+ /* ------^-------
+ * /POWER
+ * ==============
+ * CLIPBOARD
+ * ------v------*/
+
+ openClipboardPanel() {
+ UI.closeAllPanels();
+ UI.openControlbar();
+
+ document.getElementById("noVNC_clipboard").classList.add("noVNC_open");
+ document
+ .getElementById("noVNC_clipboard_button")
+ .classList.add("noVNC_selected");
+ },
+
+ closeClipboardPanel() {
+ document.getElementById("noVNC_clipboard").classList.remove("noVNC_open");
+ document
+ .getElementById("noVNC_clipboard_button")
+ .classList.remove("noVNC_selected");
+ },
+
+ toggleClipboardPanel() {
+ if (
+ document
+ .getElementById("noVNC_clipboard")
+ .classList.contains("noVNC_open")
+ ) {
+ UI.closeClipboardPanel();
+ } else {
+ UI.openClipboardPanel();
+ }
+ },
+
+ clipboardReceive(e) {
+ Log.Debug(">> UI.clipboardReceive: " + e.detail.text.substr(0, 40) + "...");
+ document.getElementById("noVNC_clipboard_text").value = e.detail.text;
+ Log.Debug("<< UI.clipboardReceive");
+ },
+
+ clipboardClear() {
+ document.getElementById("noVNC_clipboard_text").value = "";
+ UI.rfb.clipboardPasteFrom("");
+ },
+
+ clipboardSend() {
+ const text = document.getElementById("noVNC_clipboard_text").value;
+ Log.Debug(">> UI.clipboardSend: " + text.substr(0, 40) + "...");
+ UI.rfb.clipboardPasteFrom(text);
+ Log.Debug("<< UI.clipboardSend");
+ },
+
+ /* ------^-------
+ * /CLIPBOARD
+ * ==============
+ * CONNECTION
+ * ------v------*/
+
+ openConnectPanel() {
+ document.getElementById("noVNC_connect_dlg").classList.add("noVNC_open");
+ },
+
+ closeConnectPanel() {
+ document.getElementById("noVNC_connect_dlg").classList.remove("noVNC_open");
+ },
+
+ connect(event, password) {
+ // Ignore when rfb already exists
+ if (typeof UI.rfb !== "undefined") {
+ return;
+ }
+
+ const host = UI.getSetting("host");
+ const port = UI.getSetting("port");
+ const path = UI.getSetting("path");
+
+ if (typeof password === "undefined") {
+ password = WebUtil.getConfigVar("password");
+ UI.reconnectPassword = password;
+ }
+
+ if (password === null) {
+ password = undefined;
+ }
+
+ UI.hideStatus();
+
+ if (!host) {
+ Log.Error("Can't connect when host is: " + host);
+ UI.showStatus(_("Must set host"), "error");
+ return;
+ }
+
+ UI.closeConnectPanel();
+
+ UI.updateVisualState("connecting");
+
+ let url;
+
+ url = UI.getSetting("encrypt") ? "wss" : "ws";
+
+ url += "://" + host;
+ if (port) {
+ url += ":" + port;
+ }
+ url += "/" + path;
+
+ UI.rfb = new RFB(document.getElementById("noVNC_container"), url, {
+ shared: UI.getSetting("shared"),
+ repeaterID: UI.getSetting("repeaterID"),
+ credentials: { password: password },
+ });
+ UI.rfb.addEventListener("connect", UI.connectFinished);
+ UI.rfb.addEventListener("disconnect", UI.disconnectFinished);
+ UI.rfb.addEventListener("credentialsrequired", UI.credentials);
+ UI.rfb.addEventListener("securityfailure", UI.securityFailed);
+ UI.rfb.addEventListener("capabilities", UI.updatePowerButton);
+ UI.rfb.addEventListener("clipboard", UI.clipboardReceive);
+ UI.rfb.addEventListener("bell", UI.bell);
+ UI.rfb.addEventListener("desktopname", UI.updateDesktopName);
+ UI.rfb.clipViewport = UI.getSetting("view_clip");
+ UI.rfb.scaleViewport = UI.getSetting("resize") === "scale";
+ UI.rfb.resizeSession = UI.getSetting("resize") === "remote";
+ UI.rfb.qualityLevel = parseInt(UI.getSetting("quality"));
+ UI.rfb.compressionLevel = parseInt(UI.getSetting("compression"));
+ UI.rfb.showDotCursor = UI.getSetting("show_dot");
+
+ UI.updateViewOnly(); // requires UI.rfb
+
+ // ######################## CUSTOM CODE ########################
+ var clipboard = document.getElementById("clipboard");
+ var controlBar = document.getElementById("noVNC_control_bar");
+ var controlBarClipboard = document.getElementById("noVNC_clipboard_text");
+
+ UI.rfb.addEventListener("clipboard", function (e) {
+ clipboard.value = e.detail.text;
+ });
+
+ UI.rfb.addEventListener("connect", function (e) {
+ var viewer = document.getElementsByTagName("canvas")[0];
+
+ document.body.addEventListener("paste", function (e) {
+ try {
+ console.log("paste");
+ if (controlBarClipboard.classList.contains("noVNC_open")) {
return;
- }
-
- UI.connect(null, UI.reconnect_password);
- },
-
- cancelReconnect() {
- if (UI.reconnect_callback !== null) {
- clearTimeout(UI.reconnect_callback);
- UI.reconnect_callback = null;
- }
-
- UI.updateVisualState('disconnected');
-
- UI.openControlbar();
- UI.openConnectPanel();
- },
-
- connectFinished(e) {
- UI.connected = true;
- UI.inhibit_reconnect = false;
-
- let msg;
- if (UI.getSetting('encrypt')) {
- msg = _("Connected (encrypted) to ") + UI.desktopName;
- } else {
- msg = _("Connected (unencrypted) to ") + UI.desktopName;
- }
- UI.showStatus(msg);
- UI.updateVisualState('connected');
-
- // Do this last because it can only be used on rendered elements
- UI.rfb.focus();
- },
-
- disconnectFinished(e) {
- const wasConnected = UI.connected;
-
- // This variable is ideally set when disconnection starts, but
- // when the disconnection isn't clean or if it is initiated by
- // the server, we need to do it here as well since
- // UI.disconnect() won't be used in those cases.
- UI.connected = false;
-
- UI.rfb = undefined;
-
- if (!e.detail.clean) {
- UI.updateVisualState('disconnected');
- if (wasConnected) {
- UI.showStatus(_("Something went wrong, connection is closed"),
- 'error');
- } else {
- UI.showStatus(_("Failed to connect to server"), 'error');
- }
- } else if (UI.getSetting('reconnect', false) === true && !UI.inhibit_reconnect) {
- UI.updateVisualState('reconnecting');
-
- const delay = parseInt(UI.getSetting('reconnect_delay'));
- UI.reconnect_callback = setTimeout(UI.reconnect, delay);
+ }
+ if (UI.rfb != null) {
+ text = e.clipboardData.getData("text");
+ console.log("copy clipboard from local to vnc: " + text);
+ controlBarClipboard.value = text;
+ UI.rfb.clipboardPasteFrom(text);
+ }
+ } catch (error) {
+ console.error(error);
+ }
+ });
+
+ document.body.addEventListener("keydown", function (e) {
+ try {
+ console.log("document keydown: " + e.keyCode);
+ if (controlBar.classList.contains("noVNC_open")) {
return;
- } else {
- UI.updateVisualState('disconnected');
- UI.showStatus(_("Disconnected"), 'normal');
- }
-
- UI.openControlbar();
- UI.openConnectPanel();
- },
-
- securityFailed(e) {
- let msg = "";
- // On security failures we might get a string with a reason
- // directly from the server. Note that we can't control if
- // this string is translated or not.
- if ('reason' in e.detail) {
- msg = _("New connection has been rejected with reason: ") +
- e.detail.reason;
- } else {
- msg = _("New connection has been rejected");
- }
- UI.showStatus(msg, 'error');
- },
-
- /* ------^-------
- * /CONNECTION
- * ==============
- * PASSWORD
- * ------v------*/
-
- credentials(e) {
- // FIXME: handle more types
- document.getElementById('noVNC_password_dlg')
- .classList.add('noVNC_open');
-
- setTimeout(() => document
- .getElementById('noVNC_password_input').focus(), 100);
-
- Log.Warn("Server asked for a password");
- UI.showStatus(_("Password is required"), "warning");
- },
-
- setPassword(e) {
- // Prevent actually submitting the form
- e.preventDefault();
-
- const inputElem = document.getElementById('noVNC_password_input');
- const password = inputElem.value;
- // Clear the input after reading the password
- inputElem.value = "";
- UI.rfb.sendCredentials({
- password: password
- });
- UI.reconnect_password = password;
- document.getElementById('noVNC_password_dlg')
- .classList.remove('noVNC_open');
- },
-
- /* ------^-------
- * /PASSWORD
- * ==============
- * FULLSCREEN
- * ------v------*/
-
- toggleFullscreen() {
- if (document.fullscreenElement || // alternative standard method
- document.mozFullScreenElement || // currently working methods
- document.webkitFullscreenElement ||
- document.msFullscreenElement) {
- if (document.exitFullscreen) {
- document.exitFullscreen();
- } else if (document.mozCancelFullScreen) {
- document.mozCancelFullScreen();
- } else if (document.webkitExitFullscreen) {
- document.webkitExitFullscreen();
- } else if (document.msExitFullscreen) {
- document.msExitFullscreen();
- }
- } else {
- if (document.documentElement.requestFullscreen) {
- document.documentElement.requestFullscreen();
- } else if (document.documentElement.mozRequestFullScreen) {
- document.documentElement.mozRequestFullScreen();
- } else if (document.documentElement.webkitRequestFullscreen) {
- document.documentElement.webkitRequestFullscreen(Element.ALLOW_KEYBOARD_INPUT);
- } else if (document.body.msRequestFullscreen) {
- document.body.msRequestFullscreen();
- }
- }
- UI.updateFullscreenButton();
- },
-
- updateFullscreenButton() {
- if (document.fullscreenElement || // alternative standard method
- document.mozFullScreenElement || // currently working methods
- document.webkitFullscreenElement ||
- document.msFullscreenElement) {
- document.getElementById('noVNC_fullscreen_button')
- .classList.add("noVNC_selected");
- } else {
- document.getElementById('noVNC_fullscreen_button')
- .classList.remove("noVNC_selected");
- }
- },
-
- /* ------^-------
- * /FULLSCREEN
- * ==============
- * RESIZE
- * ------v------*/
-
- // Apply remote resizing or local scaling
- applyResizeMode() {
- if (!UI.rfb) return;
-
- UI.rfb.scaleViewport = UI.getSetting('resize') === 'scale';
- UI.rfb.resizeSession = UI.getSetting('resize') === 'remote';
- },
-
- /* ------^-------
- * /RESIZE
- * ==============
- * VIEW CLIPPING
- * ------v------*/
-
- // Update viewport clipping property for the connection. The normal
- // case is to get the value from the setting. There are special cases
- // for when the viewport is scaled or when a touch device is used.
- updateViewClip() {
- if (!UI.rfb) return;
-
- const scaling = UI.getSetting('resize') === 'scale';
-
- if (scaling) {
- // Can't be clipping if viewport is scaled to fit
- UI.forceSetting('view_clip', false);
- UI.rfb.clipViewport = false;
- } else if (isIOS() || isAndroid()) {
- // iOS and Android usually have shit scrollbars
- UI.forceSetting('view_clip', true);
- UI.rfb.clipViewport = true;
- } else {
- UI.enableSetting('view_clip');
- UI.rfb.clipViewport = UI.getSetting('view_clip');
- }
-
- // Changing the viewport may change the state of
- // the dragging button
- UI.updateViewDrag();
- },
-
- /* ------^-------
- * /VIEW CLIPPING
- * ==============
- * VIEWDRAG
- * ------v------*/
-
- toggleViewDrag() {
- if (!UI.rfb) return;
-
- UI.rfb.dragViewport = !UI.rfb.dragViewport;
- UI.updateViewDrag();
- },
-
- updateViewDrag() {
- if (!UI.connected) return;
-
- const viewDragButton = document.getElementById('noVNC_view_drag_button');
-
- if (!UI.rfb.clipViewport && UI.rfb.dragViewport) {
- // We are no longer clipping the viewport. Make sure
- // viewport drag isn't active when it can't be used.
- UI.rfb.dragViewport = false;
- }
-
- if (UI.rfb.dragViewport) {
- viewDragButton.classList.add("noVNC_selected");
- } else {
- viewDragButton.classList.remove("noVNC_selected");
- }
-
- // Different behaviour for touch vs non-touch
- // The button is disabled instead of hidden on touch devices
- if (isTouchDevice) {
- viewDragButton.classList.remove("noVNC_hidden");
-
- if (UI.rfb.clipViewport) {
- viewDragButton.disabled = false;
- } else {
- viewDragButton.disabled = true;
+ }
+ if (e.keyCode !== 86) {
+ e.preventDefault();
+ } else {
+ if (navigator.clipboard) {
+ navigator.clipboard
+ .readText()
+ .then((text) => {
+ console.log("copy clipboard from local to vnc: " + text);
+ controlBarClipboard.value = text;
+ UI.rfb.clipboardPasteFrom(text);
+ })
+ .catch((err) => {
+ console.error("Failed to read clipboard contents: ", err);
+ });
}
- } else {
- viewDragButton.disabled = false;
-
- if (UI.rfb.clipViewport) {
- viewDragButton.classList.remove("noVNC_hidden");
- } else {
- viewDragButton.classList.add("noVNC_hidden");
+ }
+ setTimeout(function () {
+ viewer.dispatchEvent(new e.constructor(e.type, e));
+ if (UI.rfb != null) {
+ UI.rfb.focus();
}
+ }, 1);
+ } catch (error) {
+ console.error(error);
}
- },
-
- /* ------^-------
- * /VIEWDRAG
- * ==============
- * KEYBOARD
- * ------v------*/
-
- showVirtualKeyboard() {
- if (!isTouchDevice) return;
-
- const input = document.getElementById('noVNC_keyboardinput');
-
- if (document.activeElement == input) return;
-
- input.focus();
+ });
+ document.body.addEventListener("keyup", function (e) {
try {
- const l = input.value.length;
- // Move the caret to the end
- input.setSelectionRange(l, l);
- } catch (err) {
- // setSelectionRange is undefined in Google Chrome
- }
- },
-
- hideVirtualKeyboard() {
- if (!isTouchDevice) return;
-
- const input = document.getElementById('noVNC_keyboardinput');
-
- if (document.activeElement != input) return;
-
- input.blur();
- },
-
- toggleVirtualKeyboard() {
- if (document.getElementById('noVNC_keyboard_button')
- .classList.contains("noVNC_selected")) {
- UI.hideVirtualKeyboard();
- } else {
- UI.showVirtualKeyboard();
- }
- },
-
- onfocusVirtualKeyboard(event) {
- document.getElementById('noVNC_keyboard_button')
- .classList.add("noVNC_selected");
- if (UI.rfb) {
- UI.rfb.focusOnClick = false;
- }
- },
-
- onblurVirtualKeyboard(event) {
- document.getElementById('noVNC_keyboard_button')
- .classList.remove("noVNC_selected");
- if (UI.rfb) {
- UI.rfb.focusOnClick = true;
- }
- },
-
- keepVirtualKeyboard(event) {
- const input = document.getElementById('noVNC_keyboardinput');
-
- // Only prevent focus change if the virtual keyboard is active
- if (document.activeElement != input) {
+ console.log("document keyup");
+ if (controlBar.classList.contains("noVNC_open")) {
return;
- }
+ }
- // Only allow focus to move to other elements that need
- // focus to function properly
- if (event.target.form !== undefined) {
- switch (event.target.type) {
- case 'text':
- case 'email':
- case 'search':
- case 'password':
- case 'tel':
- case 'url':
- case 'textarea':
- case 'select-one':
- case 'select-multiple':
- return;
+ if (e.keyCode !== 86) {
+ e.preventDefault();
+ }
+ setTimeout(function () {
+ viewer.dispatchEvent(new e.constructor(e.type, e));
+ if (UI.rfb != null) {
+ UI.rfb.focus();
}
+ }, 1);
+ } catch (error) {
+ console.error(error);
}
+ });
- event.preventDefault();
- },
-
- keyboardinputReset() {
- const kbi = document.getElementById('noVNC_keyboardinput');
- kbi.value = new Array(UI.defaultKeyboardinputLen).join("_");
- UI.lastKeyboardinput = kbi.value;
- },
-
- keyEvent(keysym, code, down) {
- if (!UI.rfb) return;
-
- UI.rfb.sendKey(keysym, code, down);
- },
-
- // When normal keyboard events are left uncought, use the input events from
- // the keyboardinput element instead and generate the corresponding key events.
- // This code is required since some browsers on Android are inconsistent in
- // sending keyCodes in the normal keyboard events when using on screen keyboards.
- keyInput(event) {
-
- if (!UI.rfb) return;
-
- const newValue = event.target.value;
-
- if (!UI.lastKeyboardinput) {
- UI.keyboardinputReset();
- }
- const oldValue = UI.lastKeyboardinput;
-
- let newLen;
+ viewer.addEventListener("keydown", function (e) {
try {
- // Try to check caret position since whitespace at the end
- // will not be considered by value.length in some browsers
- newLen = Math.max(event.target.selectionStart, newValue.length);
- } catch (err) {
- // selectionStart is undefined in Google Chrome
- newLen = newValue.length;
- }
- const oldLen = oldValue.length;
-
- let inputs = newLen - oldLen;
- let backspaces = inputs < 0 ? -inputs : 0;
-
- // Compare the old string with the new to account for
- // text-corrections or other input that modify existing text
- for (let i = 0; i < Math.min(oldLen, newLen); i++) {
- if (newValue.charAt(i) != oldValue.charAt(i)) {
- inputs = newLen - i;
- backspaces = oldLen - i;
- break;
+ console.log("viewer keydown");
+ if (e.ctrlKey) {
+ if (controlBar.classList.contains("noVNC_open")) {
+ return;
}
+ document.activeElement.blur();
+ }
+ } catch (error) {
+ console.error(error);
}
+ });
- // Send the key events
- for (let i = 0; i < backspaces; i++) {
- UI.rfb.sendKey(KeyTable.XK_BackSpace, "Backspace");
- }
- for (let i = newLen - inputs; i < newLen; i++) {
- UI.rfb.sendKey(keysyms.lookup(newValue.charCodeAt(i)));
- }
-
- // Control the text content length in the keyboardinput element
- if (newLen > 2 * UI.defaultKeyboardinputLen) {
- UI.keyboardinputReset();
- } else if (newLen < 1) {
- // There always have to be some text in the keyboardinput
- // element with which backspace can interact.
- UI.keyboardinputReset();
- // This sometimes causes the keyboard to disappear for a second
- // but it is required for the android keyboard to recognize that
- // text has been added to the field
- event.target.blur();
- // This has to be ran outside of the input handler in order to work
- setTimeout(event.target.focus.bind(event.target), 0);
- } else {
- UI.lastKeyboardinput = newValue;
- }
- },
-
- /* ------^-------
- * /KEYBOARD
- * ==============
- * EXTRA KEYS
- * ------v------*/
-
- openExtraKeys() {
- UI.closeAllPanels();
- UI.openControlbar();
-
- document.getElementById('noVNC_modifiers')
- .classList.add("noVNC_open");
- document.getElementById('noVNC_toggle_extra_keys_button')
- .classList.add("noVNC_selected");
- },
-
- closeExtraKeys() {
- document.getElementById('noVNC_modifiers')
- .classList.remove("noVNC_open");
- document.getElementById('noVNC_toggle_extra_keys_button')
- .classList.remove("noVNC_selected");
- },
-
- toggleExtraKeys() {
- if (document.getElementById('noVNC_modifiers')
- .classList.contains("noVNC_open")) {
- UI.closeExtraKeys();
- } else {
- UI.openExtraKeys();
- }
- },
-
- sendEsc() {
- UI.rfb.sendKey(KeyTable.XK_Escape, "Escape");
- },
-
- sendTab() {
- UI.rfb.sendKey(KeyTable.XK_Tab);
- },
-
- toggleCtrl() {
- const btn = document.getElementById('noVNC_toggle_ctrl_button');
- if (btn.classList.contains("noVNC_selected")) {
- UI.rfb.sendKey(KeyTable.XK_Control_L, "ControlLeft", false);
- btn.classList.remove("noVNC_selected");
- } else {
- UI.rfb.sendKey(KeyTable.XK_Control_L, "ControlLeft", true);
- btn.classList.add("noVNC_selected");
- }
- },
-
- toggleWindows() {
- const btn = document.getElementById('noVNC_toggle_windows_button');
- if (btn.classList.contains("noVNC_selected")) {
- UI.rfb.sendKey(KeyTable.XK_Super_L, "MetaLeft", false);
- btn.classList.remove("noVNC_selected");
- } else {
- UI.rfb.sendKey(KeyTable.XK_Super_L, "MetaLeft", true);
- btn.classList.add("noVNC_selected");
- }
- },
-
- toggleAlt() {
- const btn = document.getElementById('noVNC_toggle_alt_button');
- if (btn.classList.contains("noVNC_selected")) {
- UI.rfb.sendKey(KeyTable.XK_Alt_L, "AltLeft", false);
- btn.classList.remove("noVNC_selected");
- } else {
- UI.rfb.sendKey(KeyTable.XK_Alt_L, "AltLeft", true);
- btn.classList.add("noVNC_selected");
- }
- },
-
- sendCtrlAltDel() {
- UI.rfb.sendCtrlAltDel();
- },
-
- /* ------^-------
- * /EXTRA KEYS
- * ==============
- * MISC
- * ------v------*/
-
- setMouseButton(num) {
- const view_only = UI.rfb.viewOnly;
- if (UI.rfb && !view_only) {
- UI.rfb.touchButton = num;
- }
-
- const blist = [0, 1, 2, 4];
- for (let b = 0; b < blist.length; b++) {
- const button = document.getElementById('noVNC_mouse_button' +
- blist[b]);
- if (blist[b] === num && !view_only) {
- button.classList.remove("noVNC_hidden");
- } else {
- button.classList.add("noVNC_hidden");
+ viewer.addEventListener("keyup", function (e) {
+ try {
+ console.log("viewer keyup: " + e.keyCode);
+ if (e.ctrlKey && [67, 88].includes(e.keyCode)) {
+ if (controlBar.classList.contains("noVNC_open")) {
+ return;
}
- }
- },
-
- updateViewOnly() {
- if (!UI.rfb) return;
- UI.rfb.viewOnly = UI.getSetting('view_only');
-
- // Hide input related buttons in view only mode
- if (UI.rfb.viewOnly) {
- document.getElementById('noVNC_keyboard_button')
- .classList.add('noVNC_hidden');
- document.getElementById('noVNC_toggle_extra_keys_button')
- .classList.add('noVNC_hidden');
- document.getElementById('noVNC_mouse_button' + UI.rfb.touchButton)
- .classList.add('noVNC_hidden');
- } else {
- document.getElementById('noVNC_keyboard_button')
- .classList.remove('noVNC_hidden');
- document.getElementById('noVNC_toggle_extra_keys_button')
- .classList.remove('noVNC_hidden');
- document.getElementById('noVNC_mouse_button' + UI.rfb.touchButton)
- .classList.remove('noVNC_hidden');
- }
- },
-
- updateShowDotCursor() {
- if (!UI.rfb) return;
- UI.rfb.showDotCursor = UI.getSetting('show_dot');
- },
-
- updateLogging() {
- WebUtil.init_logging(UI.getSetting('logging'));
- },
-
- updateDesktopName(e) {
- UI.desktopName = e.detail.name;
- // Display the desktop name in the document title
- document.title = e.detail.name + " - noVNC";
- },
-
- bell(e) {
- if (WebUtil.getConfigVar('bell', 'on') === 'on') {
- const promise = document.getElementById('noVNC_bell').play();
- // The standards disagree on the return value here
- if (promise) {
- promise.catch((e) => {
- if (e.name === "NotAllowedError") {
- // Ignore when the browser doesn't let us play audio.
- // It is common that the browsers require audio to be
- // initiated from a user action.
- } else {
- Log.Error("Unable to play bell: " + e);
- }
- });
+ if (clipboard != null) {
+ console.log(
+ "copy clipboard from vnc to local: " + clipboard.value
+ );
+ clipboard.focus();
+ clipboard.select();
+
+ document.execCommand("copy");
+ if (UI.rfb != null) {
+ UI.rfb.focus();
+ }
}
- }
- },
-
- //Helper to add options to dropdown.
- addOption(selectbox, text, value) {
- const optn = document.createElement("OPTION");
- optn.text = text;
- optn.value = value;
- selectbox.options.add(optn);
- },
-
- /* ------^-------
- * /MISC
- * ==============
- */
+ }
+ } catch (error) {
+ console.error(error);
+ }
+ });
+ });
+ // ######################## END CUSTOM CODE ########################
+ },
+
+ disconnect() {
+ UI.rfb.disconnect();
+
+ UI.connected = false;
+
+ // Disable automatic reconnecting
+ UI.inhibitReconnect = true;
+
+ UI.updateVisualState("disconnecting");
+
+ // Don't display the connection settings until we're actually disconnected
+ },
+
+ reconnect() {
+ UI.reconnectCallback = null;
+
+ // if reconnect has been disabled in the meantime, do nothing.
+ if (UI.inhibitReconnect) {
+ return;
+ }
+
+ UI.connect(null, UI.reconnectPassword);
+ },
+
+ cancelReconnect() {
+ if (UI.reconnectCallback !== null) {
+ clearTimeout(UI.reconnectCallback);
+ UI.reconnectCallback = null;
+ }
+
+ UI.updateVisualState("disconnected");
+
+ UI.openControlbar();
+ UI.openConnectPanel();
+ },
+
+ connectFinished(e) {
+ UI.connected = true;
+ UI.inhibitReconnect = false;
+
+ let msg;
+ if (UI.getSetting("encrypt")) {
+ msg = _("Connected (encrypted) to ") + UI.desktopName;
+ } else {
+ msg = _("Connected (unencrypted) to ") + UI.desktopName;
+ }
+ UI.showStatus(msg);
+ UI.updateVisualState("connected");
+
+ // Do this last because it can only be used on rendered elements
+ UI.rfb.focus();
+ },
+
+ disconnectFinished(e) {
+ const wasConnected = UI.connected;
+
+ // This variable is ideally set when disconnection starts, but
+ // when the disconnection isn't clean or if it is initiated by
+ // the server, we need to do it here as well since
+ // UI.disconnect() won't be used in those cases.
+ UI.connected = false;
+
+ UI.rfb = undefined;
+
+ if (!e.detail.clean) {
+ UI.updateVisualState("disconnected");
+ if (wasConnected) {
+ UI.showStatus(_("Something went wrong, connection is closed"), "error");
+ } else {
+ UI.showStatus(_("Failed to connect to server"), "error");
+ }
+ } else if (
+ UI.getSetting("reconnect", false) === true &&
+ !UI.inhibitReconnect
+ ) {
+ UI.updateVisualState("reconnecting");
+
+ const delay = parseInt(UI.getSetting("reconnect_delay"));
+ UI.reconnectCallback = setTimeout(UI.reconnect, delay);
+ return;
+ } else {
+ UI.updateVisualState("disconnected");
+ UI.showStatus(_("Disconnected"), "normal");
+ }
+
+ document.title = PAGE_TITLE;
+
+ UI.openControlbar();
+ UI.openConnectPanel();
+ },
+
+ securityFailed(e) {
+ let msg = "";
+ // On security failures we might get a string with a reason
+ // directly from the server. Note that we can't control if
+ // this string is translated or not.
+ if ("reason" in e.detail) {
+ msg =
+ _("New connection has been rejected with reason: ") + e.detail.reason;
+ } else {
+ msg = _("New connection has been rejected");
+ }
+ UI.showStatus(msg, "error");
+ },
+
+ /* ------^-------
+ * /CONNECTION
+ * ==============
+ * PASSWORD
+ * ------v------*/
+
+ credentials(e) {
+ // FIXME: handle more types
+
+ document
+ .getElementById("noVNC_username_block")
+ .classList.remove("noVNC_hidden");
+ document
+ .getElementById("noVNC_password_block")
+ .classList.remove("noVNC_hidden");
+
+ let inputFocus = "none";
+ if (e.detail.types.indexOf("username") === -1) {
+ document
+ .getElementById("noVNC_username_block")
+ .classList.add("noVNC_hidden");
+ } else {
+ inputFocus = inputFocus === "none" ? "noVNC_username_input" : inputFocus;
+ }
+ if (e.detail.types.indexOf("password") === -1) {
+ document
+ .getElementById("noVNC_password_block")
+ .classList.add("noVNC_hidden");
+ } else {
+ inputFocus = inputFocus === "none" ? "noVNC_password_input" : inputFocus;
+ }
+ document
+ .getElementById("noVNC_credentials_dlg")
+ .classList.add("noVNC_open");
+
+ setTimeout(() => document.getElementById(inputFocus).focus(), 100);
+
+ Log.Warn("Server asked for credentials");
+ UI.showStatus(_("Credentials are required"), "warning");
+ },
+
+ setCredentials(e) {
+ // Prevent actually submitting the form
+ e.preventDefault();
+
+ let inputElemUsername = document.getElementById("noVNC_username_input");
+ const username = inputElemUsername.value;
+
+ let inputElemPassword = document.getElementById("noVNC_password_input");
+ const password = inputElemPassword.value;
+ // Clear the input after reading the password
+ inputElemPassword.value = "";
+
+ UI.rfb.sendCredentials({ username: username, password: password });
+ UI.reconnectPassword = password;
+ document
+ .getElementById("noVNC_credentials_dlg")
+ .classList.remove("noVNC_open");
+ },
+
+ /* ------^-------
+ * /PASSWORD
+ * ==============
+ * FULLSCREEN
+ * ------v------*/
+
+ toggleFullscreen() {
+ if (
+ document.fullscreenElement || // alternative standard method
+ document.mozFullScreenElement || // currently working methods
+ document.webkitFullscreenElement ||
+ document.msFullscreenElement
+ ) {
+ if (document.exitFullscreen) {
+ document.exitFullscreen();
+ } else if (document.mozCancelFullScreen) {
+ document.mozCancelFullScreen();
+ } else if (document.webkitExitFullscreen) {
+ document.webkitExitFullscreen();
+ } else if (document.msExitFullscreen) {
+ document.msExitFullscreen();
+ }
+ } else {
+ if (document.documentElement.requestFullscreen) {
+ document.documentElement.requestFullscreen();
+ } else if (document.documentElement.mozRequestFullScreen) {
+ document.documentElement.mozRequestFullScreen();
+ } else if (document.documentElement.webkitRequestFullscreen) {
+ document.documentElement.webkitRequestFullscreen(
+ Element.ALLOW_KEYBOARD_INPUT
+ );
+ } else if (document.body.msRequestFullscreen) {
+ document.body.msRequestFullscreen();
+ }
+ }
+ UI.updateFullscreenButton();
+ },
+
+ updateFullscreenButton() {
+ if (
+ document.fullscreenElement || // alternative standard method
+ document.mozFullScreenElement || // currently working methods
+ document.webkitFullscreenElement ||
+ document.msFullscreenElement
+ ) {
+ document
+ .getElementById("noVNC_fullscreen_button")
+ .classList.add("noVNC_selected");
+ } else {
+ document
+ .getElementById("noVNC_fullscreen_button")
+ .classList.remove("noVNC_selected");
+ }
+ },
+
+ /* ------^-------
+ * /FULLSCREEN
+ * ==============
+ * RESIZE
+ * ------v------*/
+
+ // Apply remote resizing or local scaling
+ applyResizeMode() {
+ if (!UI.rfb) return;
+
+ UI.rfb.scaleViewport = UI.getSetting("resize") === "scale";
+ UI.rfb.resizeSession = UI.getSetting("resize") === "remote";
+ },
+
+ /* ------^-------
+ * /RESIZE
+ * ==============
+ * VIEW CLIPPING
+ * ------v------*/
+
+ // Update viewport clipping property for the connection. The normal
+ // case is to get the value from the setting. There are special cases
+ // for when the viewport is scaled or when a touch device is used.
+ updateViewClip() {
+ if (!UI.rfb) return;
+
+ const scaling = UI.getSetting("resize") === "scale";
+
+ if (scaling) {
+ // Can't be clipping if viewport is scaled to fit
+ UI.forceSetting("view_clip", false);
+ UI.rfb.clipViewport = false;
+ } else if (!hasScrollbarGutter) {
+ // Some platforms have scrollbars that are difficult
+ // to use in our case, so we always use our own panning
+ UI.forceSetting("view_clip", true);
+ UI.rfb.clipViewport = true;
+ } else {
+ UI.enableSetting("view_clip");
+ UI.rfb.clipViewport = UI.getSetting("view_clip");
+ }
+
+ // Changing the viewport may change the state of
+ // the dragging button
+ UI.updateViewDrag();
+ },
+
+ /* ------^-------
+ * /VIEW CLIPPING
+ * ==============
+ * VIEWDRAG
+ * ------v------*/
+
+ toggleViewDrag() {
+ if (!UI.rfb) return;
+
+ UI.rfb.dragViewport = !UI.rfb.dragViewport;
+ UI.updateViewDrag();
+ },
+
+ updateViewDrag() {
+ if (!UI.connected) return;
+
+ const viewDragButton = document.getElementById("noVNC_view_drag_button");
+
+ if (!UI.rfb.clipViewport && UI.rfb.dragViewport) {
+ // We are no longer clipping the viewport. Make sure
+ // viewport drag isn't active when it can't be used.
+ UI.rfb.dragViewport = false;
+ }
+
+ if (UI.rfb.dragViewport) {
+ viewDragButton.classList.add("noVNC_selected");
+ } else {
+ viewDragButton.classList.remove("noVNC_selected");
+ }
+
+ if (UI.rfb.clipViewport) {
+ viewDragButton.classList.remove("noVNC_hidden");
+ } else {
+ viewDragButton.classList.add("noVNC_hidden");
+ }
+ },
+
+ /* ------^-------
+ * /VIEWDRAG
+ * ==============
+ * QUALITY
+ * ------v------*/
+
+ updateQuality() {
+ if (!UI.rfb) return;
+
+ UI.rfb.qualityLevel = parseInt(UI.getSetting("quality"));
+ },
+
+ /* ------^-------
+ * /QUALITY
+ * ==============
+ * COMPRESSION
+ * ------v------*/
+
+ updateCompression() {
+ if (!UI.rfb) return;
+
+ UI.rfb.compressionLevel = parseInt(UI.getSetting("compression"));
+ },
+
+ /* ------^-------
+ * /COMPRESSION
+ * ==============
+ * KEYBOARD
+ * ------v------*/
+
+ showVirtualKeyboard() {
+ if (!isTouchDevice) return;
+
+ const input = document.getElementById("noVNC_keyboardinput");
+
+ if (document.activeElement == input) return;
+
+ input.focus();
+
+ try {
+ const l = input.value.length;
+ // Move the caret to the end
+ input.setSelectionRange(l, l);
+ } catch (err) {
+ // setSelectionRange is undefined in Google Chrome
+ }
+ },
+
+ hideVirtualKeyboard() {
+ if (!isTouchDevice) return;
+
+ const input = document.getElementById("noVNC_keyboardinput");
+
+ if (document.activeElement != input) return;
+
+ input.blur();
+ },
+
+ toggleVirtualKeyboard() {
+ if (
+ document
+ .getElementById("noVNC_keyboard_button")
+ .classList.contains("noVNC_selected")
+ ) {
+ UI.hideVirtualKeyboard();
+ } else {
+ UI.showVirtualKeyboard();
+ }
+ },
+
+ onfocusVirtualKeyboard(event) {
+ document
+ .getElementById("noVNC_keyboard_button")
+ .classList.add("noVNC_selected");
+ if (UI.rfb) {
+ UI.rfb.focusOnClick = false;
+ }
+ },
+
+ onblurVirtualKeyboard(event) {
+ document
+ .getElementById("noVNC_keyboard_button")
+ .classList.remove("noVNC_selected");
+ if (UI.rfb) {
+ UI.rfb.focusOnClick = true;
+ }
+ },
+
+ keepVirtualKeyboard(event) {
+ const input = document.getElementById("noVNC_keyboardinput");
+
+ // Only prevent focus change if the virtual keyboard is active
+ if (document.activeElement != input) {
+ return;
+ }
+
+ // Only allow focus to move to other elements that need
+ // focus to function properly
+ if (event.target.form !== undefined) {
+ switch (event.target.type) {
+ case "text":
+ case "email":
+ case "search":
+ case "password":
+ case "tel":
+ case "url":
+ case "textarea":
+ case "select-one":
+ case "select-multiple":
+ return;
+ }
+ }
+
+ event.preventDefault();
+ },
+
+ keyboardinputReset() {
+ const kbi = document.getElementById("noVNC_keyboardinput");
+ kbi.value = new Array(UI.defaultKeyboardinputLen).join("_");
+ UI.lastKeyboardinput = kbi.value;
+ },
+
+ keyEvent(keysym, code, down) {
+ if (!UI.rfb) return;
+
+ UI.rfb.sendKey(keysym, code, down);
+ },
+
+ // When normal keyboard events are left uncaught, use the input events from
+ // the keyboardinput element instead and generate the corresponding key events.
+ // This code is required since some browsers on Android are inconsistent in
+ // sending keyCodes in the normal keyboard events when using on screen keyboards.
+ keyInput(event) {
+ if (!UI.rfb) return;
+
+ const newValue = event.target.value;
+
+ if (!UI.lastKeyboardinput) {
+ UI.keyboardinputReset();
+ }
+ const oldValue = UI.lastKeyboardinput;
+
+ let newLen;
+ try {
+ // Try to check caret position since whitespace at the end
+ // will not be considered by value.length in some browsers
+ newLen = Math.max(event.target.selectionStart, newValue.length);
+ } catch (err) {
+ // selectionStart is undefined in Google Chrome
+ newLen = newValue.length;
+ }
+ const oldLen = oldValue.length;
+
+ let inputs = newLen - oldLen;
+ let backspaces = inputs < 0 ? -inputs : 0;
+
+ // Compare the old string with the new to account for
+ // text-corrections or other input that modify existing text
+ for (let i = 0; i < Math.min(oldLen, newLen); i++) {
+ if (newValue.charAt(i) != oldValue.charAt(i)) {
+ inputs = newLen - i;
+ backspaces = oldLen - i;
+ break;
+ }
+ }
+
+ // Send the key events
+ for (let i = 0; i < backspaces; i++) {
+ UI.rfb.sendKey(KeyTable.XK_BackSpace, "Backspace");
+ }
+ for (let i = newLen - inputs; i < newLen; i++) {
+ UI.rfb.sendKey(keysyms.lookup(newValue.charCodeAt(i)));
+ }
+
+ // Control the text content length in the keyboardinput element
+ if (newLen > 2 * UI.defaultKeyboardinputLen) {
+ UI.keyboardinputReset();
+ } else if (newLen < 1) {
+ // There always have to be some text in the keyboardinput
+ // element with which backspace can interact.
+ UI.keyboardinputReset();
+ // This sometimes causes the keyboard to disappear for a second
+ // but it is required for the android keyboard to recognize that
+ // text has been added to the field
+ event.target.blur();
+ // This has to be ran outside of the input handler in order to work
+ setTimeout(event.target.focus.bind(event.target), 0);
+ } else {
+ UI.lastKeyboardinput = newValue;
+ }
+ },
+
+ /* ------^-------
+ * /KEYBOARD
+ * ==============
+ * EXTRA KEYS
+ * ------v------*/
+
+ openExtraKeys() {
+ UI.closeAllPanels();
+ UI.openControlbar();
+
+ document.getElementById("noVNC_modifiers").classList.add("noVNC_open");
+ document
+ .getElementById("noVNC_toggle_extra_keys_button")
+ .classList.add("noVNC_selected");
+ },
+
+ closeExtraKeys() {
+ document.getElementById("noVNC_modifiers").classList.remove("noVNC_open");
+ document
+ .getElementById("noVNC_toggle_extra_keys_button")
+ .classList.remove("noVNC_selected");
+ },
+
+ toggleExtraKeys() {
+ if (
+ document
+ .getElementById("noVNC_modifiers")
+ .classList.contains("noVNC_open")
+ ) {
+ UI.closeExtraKeys();
+ } else {
+ UI.openExtraKeys();
+ }
+ },
+
+ sendEsc() {
+ UI.sendKey(KeyTable.XK_Escape, "Escape");
+ },
+
+ sendTab() {
+ UI.sendKey(KeyTable.XK_Tab, "Tab");
+ },
+
+ toggleCtrl() {
+ const btn = document.getElementById("noVNC_toggle_ctrl_button");
+ if (btn.classList.contains("noVNC_selected")) {
+ UI.sendKey(KeyTable.XK_Control_L, "ControlLeft", false);
+ btn.classList.remove("noVNC_selected");
+ } else {
+ UI.sendKey(KeyTable.XK_Control_L, "ControlLeft", true);
+ btn.classList.add("noVNC_selected");
+ }
+ },
+
+ toggleWindows() {
+ const btn = document.getElementById("noVNC_toggle_windows_button");
+ if (btn.classList.contains("noVNC_selected")) {
+ UI.sendKey(KeyTable.XK_Super_L, "MetaLeft", false);
+ btn.classList.remove("noVNC_selected");
+ } else {
+ UI.sendKey(KeyTable.XK_Super_L, "MetaLeft", true);
+ btn.classList.add("noVNC_selected");
+ }
+ },
+
+ toggleAlt() {
+ const btn = document.getElementById("noVNC_toggle_alt_button");
+ if (btn.classList.contains("noVNC_selected")) {
+ UI.sendKey(KeyTable.XK_Alt_L, "AltLeft", false);
+ btn.classList.remove("noVNC_selected");
+ } else {
+ UI.sendKey(KeyTable.XK_Alt_L, "AltLeft", true);
+ btn.classList.add("noVNC_selected");
+ }
+ },
+
+ sendCtrlAltDel() {
+ UI.rfb.sendCtrlAltDel();
+ // See below
+ UI.rfb.focus();
+ UI.idleControlbar();
+ },
+
+ sendKey(keysym, code, down) {
+ UI.rfb.sendKey(keysym, code, down);
+
+ // Move focus to the screen in order to be able to use the
+ // keyboard right after these extra keys.
+ // The exception is when a virtual keyboard is used, because
+ // if we focus the screen the virtual keyboard would be closed.
+ // In this case we focus our special virtual keyboard input
+ // element instead.
+ if (
+ document
+ .getElementById("noVNC_keyboard_button")
+ .classList.contains("noVNC_selected")
+ ) {
+ document.getElementById("noVNC_keyboardinput").focus();
+ } else {
+ UI.rfb.focus();
+ }
+ // fade out the controlbar to highlight that
+ // the focus has been moved to the screen
+ UI.idleControlbar();
+ },
+
+ /* ------^-------
+ * /EXTRA KEYS
+ * ==============
+ * MISC
+ * ------v------*/
+
+ updateViewOnly() {
+ if (!UI.rfb) return;
+ UI.rfb.viewOnly = UI.getSetting("view_only");
+
+ // Hide input related buttons in view only mode
+ if (UI.rfb.viewOnly) {
+ document
+ .getElementById("noVNC_keyboard_button")
+ .classList.add("noVNC_hidden");
+ document
+ .getElementById("noVNC_toggle_extra_keys_button")
+ .classList.add("noVNC_hidden");
+ document
+ .getElementById("noVNC_clipboard_button")
+ .classList.add("noVNC_hidden");
+ } else {
+ document
+ .getElementById("noVNC_keyboard_button")
+ .classList.remove("noVNC_hidden");
+ document
+ .getElementById("noVNC_toggle_extra_keys_button")
+ .classList.remove("noVNC_hidden");
+ document
+ .getElementById("noVNC_clipboard_button")
+ .classList.remove("noVNC_hidden");
+ }
+ },
+
+ updateShowDotCursor() {
+ if (!UI.rfb) return;
+ UI.rfb.showDotCursor = UI.getSetting("show_dot");
+ },
+
+ updateLogging() {
+ WebUtil.initLogging(UI.getSetting("logging"));
+ },
+
+ updateDesktopName(e) {
+ UI.desktopName = e.detail.name;
+ // Display the desktop name in the document title
+ document.title = e.detail.name + " - " + PAGE_TITLE;
+ },
+
+ bell(e) {
+ if (WebUtil.getConfigVar("bell", "on") === "on") {
+ const promise = document.getElementById("noVNC_bell").play();
+ // The standards disagree on the return value here
+ if (promise) {
+ promise.catch((e) => {
+ if (e.name === "NotAllowedError") {
+ // Ignore when the browser doesn't let us play audio.
+ // It is common that the browsers require audio to be
+ // initiated from a user action.
+ } else {
+ Log.Error("Unable to play bell: " + e);
+ }
+ });
+ }
+ }
+ },
+
+ //Helper to add options to dropdown.
+ addOption(selectbox, text, value) {
+ const optn = document.createElement("OPTION");
+ optn.text = text;
+ optn.value = value;
+ selectbox.options.add(optn);
+ },
+
+ /* ------^-------
+ * /MISC
+ * ==============
+ */
};
// Set up translations
-const LINGUAS = ["cs", "de", "el", "es", "ko", "nl", "pl", "ru", "sv", "tr", "zh_CN", "zh_TW"];
+const LINGUAS = [
+ "cs",
+ "de",
+ "el",
+ "es",
+ "ja",
+ "ko",
+ "nl",
+ "pl",
+ "ru",
+ "sv",
+ "tr",
+ "zh_CN",
+ "zh_TW",
+];
l10n.setup(LINGUAS);
if (l10n.language === "en" || l10n.dictionary !== undefined) {
- UI.prime();
+ UI.prime();
} else {
- WebUtil.fetchJSON('app/locale/' + l10n.language + '.json')
- .then((translations) => {
- l10n.dictionary = translations;
- })
- .catch(err => Log.Error("Failed to load translations: " + err))
- .then(UI.prime);
+ WebUtil.fetchJSON("app/locale/" + l10n.language + ".json")
+ .then((translations) => {
+ l10n.dictionary = translations;
+ })
+ .catch((err) => Log.Error("Failed to load translations: " + err))
+ .then(UI.prime);
}
-export default UI;
\ No newline at end of file
+export default UI;
diff --git a/resources/novnc/vnc.html b/resources/novnc/vnc.html
index dcf759a1..66e1d660 100644
--- a/resources/novnc/vnc.html
+++ b/resources/novnc/vnc.html
@@ -1,10 +1,9 @@
-
-
+
- Desktop VNC
+ noVNC
+
+
-
+
+
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
-
-
-
+
+
+
-
-
-
-
+
+
+
+
-
+
@@ -57,288 +156,454 @@
-
-
+
-
+
-
-
-
+
+
-
-
noVNC encountered an error:
-
-
-
+
+
noVNC encountered an error:
+
+
+
-
-
-
+
+
-
-
-
-
-
no VNC
-
-
-
-
-
-
-
-
-
-
-
+
+
+
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
no VNC
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
+
+
+
+
+
-
- Power
-
-
-
-
-
+
+ Power
+
+
+
+
-
-
-
-
+
+
+
+
+
-
- Clipboard
-
-
-
-
-
+
+ Clipboard
+
+
+
+
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
-
-
- Settings
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
Advanced
-
+
+
+ Settings
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Advanced
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
WebSocket
+
+
-
-
+
-
WebSocket
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
-
-
+
+
-
-
+
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Version:
+
+
+
+
+
+
+
+
-
-
-
+
+
+
-
-
-
Workspace Desktop VNC
-
Connect
-
+
+
+
+ Workspace Desktop VNC
+
+
+
+ Connect
+
+
+
-
+
-
-
-
-
-
+
+
+
+
+
-
-
+
-
-
-
+
From 3699584f6f0f2ff61cdf62a6869fc2969c198f22 Mon Sep 17 00:00:00 2001
From: Benjamin Raethlein
Date: Sat, 21 Nov 2020 20:05:11 +0100
Subject: [PATCH 050/293] Update libs to be able to build the image
---
resources/libraries/requirements-full.txt | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/resources/libraries/requirements-full.txt b/resources/libraries/requirements-full.txt
index 34e6b597..540485ac 100644
--- a/resources/libraries/requirements-full.txt
+++ b/resources/libraries/requirements-full.txt
@@ -26,7 +26,7 @@ chainer==7.1.0
chardet==3.0.4
classy_vision==0.2.0
cloudant==2.12.0
-coremltools==3.2
+coremltools==4.0
cufflinks==0.17.0
cx-Oracle==7.3.0
cytoolz==0.10.1
@@ -35,7 +35,7 @@ dask==2.9.2
dataset==1.2.0
datasketch==1.5.0
deap==1.3.1
-dgl==0.4.2
+dgl==0.5.2
diamond==4.0.515
distributed==2.9.3
dm-sonnet==1.35
@@ -118,8 +118,8 @@ neo4j-driver==1.7.6
nevergrad==0.3.1
nose2==0.9.1
numexpr==2.7.1
-onnx==1.6.0
-onnxruntime==1.1.1
+onnx==1.8.0
+onnxruntime==1.5.2
optuna==1.5.0
pandas-datareader==0.8.1
pandas-gbq==0.13.0
@@ -238,4 +238,4 @@ pivottablejs==0.9.0
pythreejs==2.1.1
qgrid==1.2.0
voila
-widgetsnbextension==3.5.1
\ No newline at end of file
+widgetsnbextension==3.5.1
From f2c9319c86240b130672f874c27caff6c00d3b1f Mon Sep 17 00:00:00 2001
From: Benjamin Raethlein
Date: Sat, 21 Nov 2020 21:11:34 +0100
Subject: [PATCH 051/293] Fix vncserver start command
---
resources/scripts/start-vnc-server.sh | 9 ++++++---
1 file changed, 6 insertions(+), 3 deletions(-)
diff --git a/resources/scripts/start-vnc-server.sh b/resources/scripts/start-vnc-server.sh
index ecb5830a..2946b17f 100644
--- a/resources/scripts/start-vnc-server.sh
+++ b/resources/scripts/start-vnc-server.sh
@@ -25,7 +25,10 @@ chmod 600 $HOME/.vnc/passwd
# Setting pidfile + command to execute
pidfile="$HOME/.vnc/*:1.pid"
-command="/usr/bin/vncserver $DISPLAY -geometry $VNC_RESOLUTION -depth $VNC_COL_DEPTH -name Desktop-GUI -autokill"
+config_file=$HOME/.vnc/config
+touch $config_file
+printf "geometry=$VNC_RESOLUTION\ndepth=$VNC_COL_DEPTH\ndesktop=Desktop-GUI" > ~/.vnc/config
+command="/usr/libexec/vncserver $DISPLAY"
# Proxy signals
function kill_app(){
@@ -37,7 +40,7 @@ function kill_app(){
}
trap "kill_app" SIGINT SIGTERM EXIT
-#cleanup tmp from previous run
+#cleanup tmp from previous run
# run vncserver kill in background
vncserver -kill $DISPLAY &
rm -rfv /tmp/.X*-lock /tmp/.x*-lock /tmp/.X11-unix
@@ -63,4 +66,4 @@ while [ -f $pidfile ] && kill -0 $(cat $pidfile) ; do
done
-exit 1000 # exit unexpected
\ No newline at end of file
+exit 1000 # exit unexpected
From 42712842a2f07e51044a835fcc1200101c206bdc Mon Sep 17 00:00:00 2001
From: Jan Kalkan
Date: Tue, 24 Nov 2020 16:54:25 +0100
Subject: [PATCH 052/293] Use fixed version of jupyter-tensorboard
---
resources/libraries/requirements-minimal.txt | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/resources/libraries/requirements-minimal.txt b/resources/libraries/requirements-minimal.txt
index 1c9535e2..423b4ef5 100644
--- a/resources/libraries/requirements-minimal.txt
+++ b/resources/libraries/requirements-minimal.txt
@@ -41,6 +41,6 @@ jupyterhub==1.2.1 # JupyterHub: A multi-user server for Jupyter notebooks
remote_ikernel==0.4.6 # Running IPython kernels through batch queues
jupyter_contrib_nbextensions==0.5.1 # A collection of Jupyter nbextensions.
jupyter_nbextensions_configurator==0.4.1 # jupyter serverextension providing configuration interfaces for nbextensions.
-# jupyter-tensorboard==0.1.10 # Jupyter notebook integration for tensorboard.
-git+https://github.com/cloudrainstar/jupyter_tensorboard.git # Use other version with support for tensorflow 2.X
+# jupyter-tensorboard version has a bug and hence does not support tensorboard 2.3 - 2.4 currently
+git+https://github.com/InfuseAI/jupyter_tensorboard.git
nbdime==2.1.0 # Diff and merge of Jupyter Notebooks
From a618fe68371c712ed6ed71fa699cd0ab2cb96318 Mon Sep 17 00:00:00 2001
From: Jan Kalkan
Date: Tue, 24 Nov 2020 20:54:11 +0100
Subject: [PATCH 053/293] Fix noVNC clipboard patch
---
resources/novnc/vnc.html | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/resources/novnc/vnc.html b/resources/novnc/vnc.html
index 66e1d660..03174103 100644
--- a/resources/novnc/vnc.html
+++ b/resources/novnc/vnc.html
@@ -588,6 +588,7 @@