diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 96e503b..c1c67bd 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -64,7 +64,7 @@ jobs:
       uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4
       with:
         name: package-${{ github.sha }}
-        path: conda-bld/*/*.tar.bz2
+        path: conda-bld
   tests:
     runs-on: ${{ matrix.os }}
     needs: [build,testbed]
@@ -105,9 +105,14 @@ jobs:
         source $CONDA_ROOT/etc/profile.d/conda.sh
         [ "$RUNNER_OS" = "Windows" ] && export PYTHONIOENCODING=UTF-8
         export PYTHONUNBUFFERED=1
-        conda build --test conda-bld/noarch/*.tar.bz2 | tee build.log
+        export NBVER=6
+        [ 3.12 = ${{ matrix.pyver }} ] && export NBVER=7
+        conda create -n testbase -c ./conda-bld nb_conda_kernels python=${{ matrix.pyver }} notebook=$NBVER pytest pytest-cov mock requests
+        conda activate testbase
+        python -m nb_conda_kernels list
+        python -m pytest -v --cov=nb_conda_kernels tests 2>&1 | tee build.log
         # Because Windows refuses to preserve the error code
-        if grep ' FAILED ' build.log; then exit -1; fi
+        if grep -E '^(FAILED|ERROR) ' build.log; then exit -1; fi
   upload:
     needs: tests
     runs-on: ubuntu-latest
diff --git a/conda-recipe/meta.yaml b/conda-recipe/meta.yaml
index e37f605..108f543 100644
--- a/conda-recipe/meta.yaml
+++ b/conda-recipe/meta.yaml
@@ -21,8 +21,9 @@ requirements:
     - python >=3.6
     - jupyter_client >=4.2
     - jupyter_core
-    - traitlets
     - psutil
+  run_constrained:
+    - notebook >=5.3.0
 
 test:
   source_files:
diff --git a/nb_conda_kernels/install.py b/nb_conda_kernels/install.py
index d6356ae..a889fe8 100644
--- a/nb_conda_kernels/install.py
+++ b/nb_conda_kernels/install.py
@@ -6,9 +6,27 @@
 
 from os.path import join, abspath, exists
 
-from traitlets.config.manager import BaseJSONConfigManager
 from jupyter_core.paths import jupyter_config_path
-from jupyter_client import __version__ as jc_version
+
+try:
+    from notebook import __version__ as nb_version
+except ImportError:
+    nb_version = '999'
+
+try:
+    from jupyter_server.config_manager import BaseJSONConfigManager
+except ImportError:
+    try:
+        from notebook.config_manager import BaseJSONConfigManager
+    except ImportError:
+        raise ImportError("Must have notebook>=5.3 or jupyter_server installed")
+
+
+# If true, we need to add a NotebookApp entry into jupyter_config.json.
+# If false, we should avoid doing so, since notebook 7 and later have
+# removed direct support for kernel spec managers in favor of relying
+# on jupyter_server.
+NEED_NOTEBOOK = int(nb_version.split('.', 1)[0]) < 7
 
 log = logging.getLogger(__name__)
 
@@ -16,6 +34,7 @@
 
 JA = "JupyterApp"
 NBA = "NotebookApp"
+SA = "ServerApp"
 CKSM = "nb_conda_kernels.CondaKernelSpecManager"
 JKSM = "jupyter_client.kernelspec.KernelSpecManager"
 KSMC = "kernel_spec_manager_class"
@@ -107,6 +126,7 @@ def install(enable=False, disable=False, status=None, prefix=None, path=None, ve
     fpaths = set()
     is_enabled_all = {}
     is_enabled_local = {}
+    need_keys = (SA, NBA) if NEED_NOTEBOOK else (SA,)
     for path_g in search_paths:
         flag = '-' if path != path_g else ('*' if path in all_paths else 'x')
         value = ''
@@ -114,12 +134,12 @@ def install(enable=False, disable=False, status=None, prefix=None, path=None, ve
             fpath = join(path_g, fbase + '.json')
             cfg = BaseJSONConfigManager(config_dir=path_g).get(fbase)
             dirty = False
-            for key in (JA, NBA):
+            for key in (JA, NBA, SA):
                 spec = cfg.get(key, {}).get(KSMC)
                 if status or path_g != path:
                     # No changes in status mode, or if we're not in the target path
                     expected = spec
-                elif enable and fbase == JC and key == JA:
+                elif enable and fbase == JC and key in need_keys:
                     # Add the spec if we are enabling, the entry point is not active,
                     # and we're using the new file (jupyter_config.json) and key (JupyterApp)
                     expected = CKSM
@@ -151,8 +171,8 @@ def install(enable=False, disable=False, status=None, prefix=None, path=None, ve
                 value += ': '
                 value += '\n '.join(json.dumps(cfg, indent=2).splitlines())
         log.debug(' {} {}: {}'.format(flag, shorten(path_g), value or ''))
-    is_enabled_all = bool(is_enabled_all.get(NBA, is_enabled_all.get(JA)))
-    is_enabled_local = bool(is_enabled_local.get(NBA, is_enabled_local.get(JA)))
+    is_enabled_all = all(is_enabled_all.get(k) for k in need_keys)
+    is_enabled_local = all(is_enabled_local.get(k) for k in need_keys)
 
     if is_enabled_all != is_enabled_local:
         sev = 'WARNING' if status else 'ERROR'
diff --git a/nb_conda_kernels/manager.py b/nb_conda_kernels/manager.py
index 5178717..114f133 100644
--- a/nb_conda_kernels/manager.py
+++ b/nb_conda_kernels/manager.py
@@ -54,7 +54,7 @@ def _validate_kernelspec_path(self, proposal):
         if new_value not in ("", "--user", "--sys-prefix"):
             if not os.path.isdir(self.kernelspec_path):
                 raise TraitError("CondaKernelSpecManager.kernelspec_path is not a directory.")
-            self.log.debug("[nb_conda_kernels] Force conda_only=True as kernelspec_path is not None.")
+            self.log.debug("nb_conda_kernels | Force conda_only=True as kernelspec_path is not None.")
             self.conda_only = True
 
         return new_value
@@ -92,7 +92,7 @@ def __init__(self, **kwargs):
             self._kernel_prefix = sys.prefix if self.kernelspec_path == "--sys-prefix" else self.kernelspec_path
 
         self.log.info(
-            "[nb_conda_kernels] enabled, %s kernels found", len(self._conda_kspecs)
+            "nb_conda_kernels | enabled, %s kernels found.", len(self._conda_kspecs)
         )
 
     @staticmethod
@@ -147,10 +147,10 @@ def run(self):
 
         # cache is empty
         if expiry is None:
-            self.log.debug("[nb_conda_kernels] refreshing conda info (blocking call)")
+            self.log.debug("nb_conda_kernels | refreshing conda info (blocking call)")
             conda_info, err = get_conda_info_data()
             if conda_info is None:
-                self.log.error("[nb_conda_kernels] couldn't call conda:\n%s", err)
+                self.log.error("nb_conda_kernels | couldn't call conda:\n%s", err)
             self._conda_info_cache = conda_info
             self._conda_info_cache_expiry = time.time() + CACHE_TIMEOUT
 
@@ -159,16 +159,16 @@ def run(self):
                 t.join()
                 conda_info = t.out
                 if conda_info is None:
-                    self.log.error("[nb_conda_kernels] couldn't call conda:\n%s", t.err)
self.log.error("nb_conda_kernels | couldn't call conda:\n%s", t.err) else: - self.log.debug("[nb_conda_kernels] collected conda info (async call)") + self.log.debug("nb_conda_kernels | collected conda info (async call)") self._conda_info_cache = conda_info self._conda_info_cache_expiry = time.time() + CACHE_TIMEOUT self._conda_info_cache_thread = None # cache expired elif not t and expiry < time.time(): - self.log.debug("[nb_conda_kernels] refreshing conda info (async call)") + self.log.debug("nb_conda_kernels | refreshing conda info (async call)") t = CondaInfoThread() t.start() self._conda_info_cache_thread = t @@ -249,13 +249,13 @@ def _all_specs(self): data = fp.read() spec = json.loads(data.decode('utf-8')) except Exception as err: - self.log.error("[nb_conda_kernels] error loading %s:\n%s", + self.log.error("nb_conda_kernels | error loading %s:\n%s", spec_path, err) continue kernel_dir = dirname(spec_path).lower() kernel_name = raw_kernel_name = basename(kernel_dir) if self.kernelspec_path is not None and kernel_name.startswith("conda-"): - self.log.debug("[nb_conda_kernels] Skipping kernel spec %s", spec_path) + self.log.debug("nb_conda_kernels | Skipping kernel spec %s", spec_path) continue # Ensure to skip dynamically added kernel spec within the environment prefix # We're doing a few of these adjustments here to ensure that # the naming convention is as close as possible to the previous @@ -312,7 +312,7 @@ def _all_specs(self): json.dump(tmp_spec, f) except OSError as error: self.log.warning( - u"[nb_conda_kernels] Fail to install kernel '{}'.".format(kernel_dir), + u"nb_conda_kernels | Fail to install kernel '{}'.".format(kernel_dir), exc_info=error ) diff --git a/tests/test_api.py b/tests/test_api.py index e49b9bc..189c92d 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1,5 +1,10 @@ from subprocess import check_output, CalledProcessError -from notebook.services.kernelspecs.tests import test_kernelspecs_api + +try: + from notebook.services.kernelspecs.tests import test_kernelspecs_api +except Exception: + import pytest + pytest.skip('Requires notebook<7', allow_module_level=True) try: from unittest.mock import patch