author     Isaac Yu <isaacyu1@isaacyu1.com>                 2023-02-17 14:28:08 -0800
committer  Willy Sudiarto Raharjo <willysr@slackbuilds.org> 2023-02-18 10:06:41 +0700
commit     d1326f419da9d0d610a4f306940d143148277bf5 (patch)
tree       e8d0016fdf4093ceae203c34f1c226ba99d426cf /development/jupyter_server
parent     ccd17804d54ef2e128938ab425b531f2bd60380a (diff)
development/jupyter_server: Backport fixes from version 2.3.0.
Signed-off-by: Andrew Clemons <andrew.clemons@gmail.com>
Signed-off-by: Willy Sudiarto Raharjo <willysr@slackbuilds.org>
Diffstat (limited to 'development/jupyter_server')
-rw-r--r--  development/jupyter_server/fix_get_loader.patch           89
-rw-r--r--  development/jupyter_server/jupyter_server.SlackBuild       6
-rw-r--r--  development/jupyter_server/redact_tokens_from_logs.patch  60
3 files changed, 154 insertions(+), 1 deletion(-)
diff --git a/development/jupyter_server/fix_get_loader.patch b/development/jupyter_server/fix_get_loader.patch
new file mode 100644
index 0000000000..eb0fb99979
--- /dev/null
+++ b/development/jupyter_server/fix_get_loader.patch
@@ -0,0 +1,89 @@
+--- a/jupyter_server/extension/utils.py
++++ b/jupyter_server/extension/utils.py
+@@ -36,20 +36,24 @@
+ underscore prefix.
+ """
+ try:
+- func = getattr(obj, "_load_jupyter_server_extension") # noqa B009
++ return getattr(obj, "_load_jupyter_server_extension") # noqa B009
++ except AttributeError:
++ pass
++
++ try:
++ func = getattr(obj, "load_jupyter_server_extension") # noqa B009
+ except AttributeError:
+- func = getattr(obj, "load_jupyter_server_extension", None)
+- warnings.warn(
+- "A `_load_jupyter_server_extension` function was not "
+- "found in {name!s}. Instead, a `load_jupyter_server_extension` "
+- "function was found and will be used for now. This function "
+- "name will be deprecated in future releases "
+- "of Jupyter Server.".format(name=obj),
+- DeprecationWarning,
+- )
+- except Exception:
+ msg = "_load_jupyter_server_extension function was not found."
+ raise ExtensionLoadingError(msg) from None
++
++ warnings.warn(
++ "A `_load_jupyter_server_extension` function was not "
++ "found in {name!s}. Instead, a `load_jupyter_server_extension` "
++ "function was found and will be used for now. This function "
++ "name will be deprecated in future releases "
++ "of Jupyter Server.".format(name=obj),
++ DeprecationWarning,
++ )
+ return func
+
+
+--- a/tests/extension/mockextensions/mockext_deprecated.py
++++ b/tests/extension/mockextensions/mockext_deprecated.py
+@@ -0,0 +1,12 @@
++"""A mock extension named `mockext_py` for testing purposes.
++"""
++# Function that makes these extensions discoverable
++# by the test functions.
++
++
++def _jupyter_server_extension_paths():
++ return [{"module": "tests.extension.mockextensions.mockext_deprecated"}]
++
++
++def load_jupyter_server_extension(serverapp):
++ pass
+--- a/tests/extension/test_utils.py
++++ b/tests/extension/test_utils.py
+@@ -1,10 +1,14 @@
+ import logging
+-import warnings
+
+ import pytest
+
+-from jupyter_server.extension.utils import get_loader, get_metadata, validate_extension
+-from tests.extension.mockextensions import mockext_sys
++from jupyter_server.extension.utils import (
++ ExtensionLoadingError,
++ get_loader,
++ get_metadata,
++ validate_extension,
++)
++from tests.extension.mockextensions import mockext_deprecated, mockext_sys
+
+ # Use ServerApps environment because it monkeypatches
+ # jupyter_core.paths and provides a config directory
+@@ -24,10 +28,11 @@
+
+
+ def test_get_loader():
+- get_loader(mockext_sys)
+- with warnings.catch_warnings():
+- warnings.simplefilter("ignore")
+- assert get_loader(object()) is None
++ assert get_loader(mockext_sys) == mockext_sys._load_jupyter_server_extension
++ with pytest.deprecated_call():
++ assert get_loader(mockext_deprecated) == mockext_deprecated.load_jupyter_server_extension
++ with pytest.raises(ExtensionLoadingError):
++ get_loader(object())
+
+
+ def test_get_metadata():
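For context, the patched get_loader now returns an extension's _load_jupyter_server_extension hook directly, falls back to the legacy load_jupyter_server_extension name while emitting a DeprecationWarning, and raises ExtensionLoadingError when neither attribute exists (previously get_loader(object()) warned and returned None). A minimal sketch of the behaviour the updated tests exercise, assuming the patched module is importable; the mock module names are the ones used in the test suite above:

    import warnings

    from jupyter_server.extension.utils import ExtensionLoadingError, get_loader
    from tests.extension.mockextensions import mockext_deprecated, mockext_sys

    # Modern hook: returned directly, no warning emitted.
    loader = get_loader(mockext_sys)
    assert loader is mockext_sys._load_jupyter_server_extension

    # Legacy hook: still returned, but flagged with a DeprecationWarning.
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        legacy = get_loader(mockext_deprecated)
    assert legacy is mockext_deprecated.load_jupyter_server_extension
    assert any(issubclass(w.category, DeprecationWarning) for w in caught)

    # Neither hook present: now raises instead of silently returning None.
    try:
        get_loader(object())
    except ExtensionLoadingError:
        pass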
diff --git a/development/jupyter_server/jupyter_server.SlackBuild b/development/jupyter_server/jupyter_server.SlackBuild
index 142418af74..0318f295dd 100644
--- a/development/jupyter_server/jupyter_server.SlackBuild
+++ b/development/jupyter_server/jupyter_server.SlackBuild
@@ -26,7 +26,7 @@ cd $(dirname $0) ; CWD=$(pwd)
PRGNAM=jupyter_server
VERSION=${VERSION:-2.2.1}
-BUILD=${BUILD:-1}
+BUILD=${BUILD:-2}
TAG=${TAG:-_SBo}
PKGTYPE=${PKGTYPE:-tgz}
@@ -76,6 +76,10 @@ find -L . \
\( -perm 666 -o -perm 664 -o -perm 640 -o -perm 600 -o -perm 444 \
-o -perm 440 -o -perm 400 \) -exec chmod 644 {} \;
+# Backports from jupyter-server 2.3.0
+patch -p1 < $CWD/redact_tokens_from_logs.patch # Redact tokens in url parameters from request logs. See upstream PR #1212
+patch -p1 < $CWD/fix_get_loader.patch # See upstream PR #1193
+
python3 -m build --no-isolation
python3 -m installer -d "$PKG" dist/*.whl
diff --git a/development/jupyter_server/redact_tokens_from_logs.patch b/development/jupyter_server/redact_tokens_from_logs.patch
new file mode 100644
index 0000000000..74ac1b9196
--- /dev/null
+++ b/development/jupyter_server/redact_tokens_from_logs.patch
@@ -0,0 +1,60 @@
+--- a/jupyter_server/log.py
++++ b/jupyter_server/log.py
+@@ -6,12 +6,39 @@
+ # the file COPYING, distributed as part of this software.
+ # -----------------------------------------------------------------------------
+ import json
++from urllib.parse import urlparse, urlunparse
+
+ from tornado.log import access_log
+
+ from .auth import User
+ from .prometheus.log_functions import prometheus_log_method
+
++# url params to be scrubbed if seen
++# any url param that *contains* one of these
++# will be scrubbed from logs
++_SCRUB_PARAM_KEYS = {"token", "auth", "key", "code", "state", "xsrf"}
++
++
++def _scrub_uri(uri: str) -> str:
++ """scrub auth info from uri"""
++ parsed = urlparse(uri)
++ if parsed.query:
++ # check for potentially sensitive url params
++ # use manual list + split rather than parsing
++ # to minimally perturb original
++ parts = parsed.query.split("&")
++ changed = False
++ for i, s in enumerate(parts):
++ key, sep, value = s.partition("=")
++ for substring in _SCRUB_PARAM_KEYS:
++ if substring in key:
++ parts[i] = f"{key}{sep}[secret]"
++ changed = True
++ if changed:
++ parsed = parsed._replace(query="&".join(parts))
++ return urlunparse(parsed)
++ return uri
++
+
+ def log_request(handler):
+ """log a bit more information about each request than tornado's default
+@@ -43,7 +70,7 @@
+ "status": status,
+ "method": request.method,
+ "ip": request.remote_ip,
+- "uri": request.uri,
++ "uri": _scrub_uri(request.uri),
+ "request_time": request_time,
+ }
+ # log username
+@@ -59,7 +86,7 @@
+ msg = "{status} {method} {uri} ({username}@{ip}) {request_time:.2f}ms"
+ if status >= 400: # noqa[PLR2004]
+ # log bad referers
+- ns["referer"] = request.headers.get("Referer", "None")
++ ns["referer"] = _scrub_uri(request.headers.get("Referer", "None"))
+ msg = msg + " referer={referer}"
+ if status >= 500 and status != 502: # noqa[PLR2004]
+ # Log a subset of the headers if it caused an error.
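For reference, the _scrub_uri helper added by this patch masks the value of any query parameter whose name contains one of the sensitive substrings (token, auth, key, code, state, xsrf) before the URI reaches the access log, and returns URIs without a query string unchanged. A rough illustration, assuming the patched jupyter_server is installed so the private helper is importable; the example URIs are made up:

    # Importing the private helper directly is only for illustration.
    from jupyter_server.log import _scrub_uri

    # A parameter whose name contains "token" is redacted; others pass through.
    print(_scrub_uri("/api/contents?token=abc123&kernel_name=python3"))
    # /api/contents?token=[secret]&kernel_name=python3

    # No query string: the URI is returned as-is.
    print(_scrub_uri("/api/status"))
    # /api/status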