From 290578ca170c0d182af8d27397c6fef45e1d6cc0 Mon Sep 17 00:00:00 2001 From: Paul Hewlett <1104895+eccles@users.noreply.github.com> Date: Mon, 25 Nov 2024 11:00:48 +0000 Subject: [PATCH] Allow binary input and output (#277) * Allow binary input and output Refactored how the appidp uses the underlying archivist instance. appidp does not require a token but does require headers - this fix has corrected an undetected bug where the user and partner headers were not passed through the appidp endpoint. Additionally notebooks failed and this was fixed - new jupyter has some changes in cell magic keywords. Changed how the environment is handled in the Initialization notebook. The archivist post and get methods now have get_binary and post_binary methods that can be used by non-rest type calls to input binary and output binary data. Removed conditional check of running pyright on python 3.12. pyright now supports 3.12 and later. AB#10061 * Fix spelling failure in docs build --------- Co-authored-by: Jon Geater --- .gitignore | 1 + .jupyter/jupyter_notebook_config.py | 1438 ----------------- .jupyter/migrated | 1 - Taskfile.yml | 7 +- archivist/appidp.py | 1 + archivist/archivist.py | 102 +- archivist/archivistpublic.py | 55 +- archivist/constants.py | 6 + .../Initialization and Credentials.ipynb | 77 +- archivist/utils.py | 5 + docs/notebooks/requirements.txt | 7 +- docs/spelling_wordlist.txt | 1 + functests/execcompliance_policies.py | 5 +- functests/execnotebooks.py | 7 +- scripts/notebooks.sh | 44 +- scripts/venv.sh | 2 +- unittests/mock_response.py | 13 +- unittests/testappidp.py | 6 + unittests/testarchivistget.py | 29 + unittests/testarchivistpost.py | 36 + 20 files changed, 303 insertions(+), 1540 deletions(-) delete mode 100644 .jupyter/jupyter_notebook_config.py delete mode 100644 .jupyter/migrated diff --git a/.gitignore b/.gitignore index 5b06aa68..81afbb1e 100644 --- a/.gitignore +++ b/.gitignore @@ -22,3 +22,4 @@ datatrails-venv/ stuntidp* .npm/ functest-results/ 
+.jupyter diff --git a/.jupyter/jupyter_notebook_config.py b/.jupyter/jupyter_notebook_config.py deleted file mode 100644 index 69444959..00000000 --- a/.jupyter/jupyter_notebook_config.py +++ /dev/null @@ -1,1438 +0,0 @@ -# Configuration file for jupyter-notebook. - -c = get_config() #noqa - -#------------------------------------------------------------------------------ -# Application(SingletonConfigurable) configuration -#------------------------------------------------------------------------------ -## This is an application. - -## The date format used by logging formatters for %(asctime)s -# Default: '%Y-%m-%d %H:%M:%S' -# c.Application.log_datefmt = '%Y-%m-%d %H:%M:%S' - -## The Logging format template -# Default: '[%(name)s]%(highlevel)s %(message)s' -# c.Application.log_format = '[%(name)s]%(highlevel)s %(message)s' - -## Set the log level by value or name. -# Choices: any of [0, 10, 20, 30, 40, 50, 'DEBUG', 'INFO', 'WARN', 'ERROR', 'CRITICAL'] -# Default: 30 -# c.Application.log_level = 30 - -## Configure additional log handlers. -# -# The default stderr logs handler is configured by the log_level, log_datefmt -# and log_format settings. -# -# This configuration can be used to configure additional handlers (e.g. to -# output the log to a file) or for finer control over the default handlers. -# -# If provided this should be a logging configuration dictionary, for more -# information see: -# https://docs.python.org/3/library/logging.config.html#logging-config- -# dictschema -# -# This dictionary is merged with the base logging configuration which defines -# the following: -# -# * A logging formatter intended for interactive use called -# ``console``. -# * A logging handler that writes to stderr called -# ``console`` which uses the formatter ``console``. -# * A logger with the name of this application set to ``DEBUG`` -# level. -# -# This example adds a new handler that writes to a file: -# -# .. 
code-block:: python -# -# c.Application.logging_config = { -# 'handlers': { -# 'file': { -# 'class': 'logging.FileHandler', -# 'level': 'DEBUG', -# 'filename': '', -# } -# }, -# 'loggers': { -# '': { -# 'level': 'DEBUG', -# # NOTE: if you don't list the default "console" -# # handler here then it will be disabled -# 'handlers': ['console', 'file'], -# }, -# } -# } -# Default: {} -# c.Application.logging_config = {} - -## Instead of starting the Application, dump configuration to stdout -# Default: False -# c.Application.show_config = False - -## Instead of starting the Application, dump configuration to stdout (as JSON) -# Default: False -# c.Application.show_config_json = False - -#------------------------------------------------------------------------------ -# JupyterApp(Application) configuration -#------------------------------------------------------------------------------ -## Base class for Jupyter applications - -## Answer yes to any prompts. -# Default: False -# c.JupyterApp.answer_yes = False - -## Full path of a config file. -# Default: '' -# c.JupyterApp.config_file = '' - -## Specify a config file to load. -# Default: '' -# c.JupyterApp.config_file_name = '' - -## Generate default config file. -# Default: False -# c.JupyterApp.generate_config = False - -## The date format used by logging formatters for %(asctime)s -# See also: Application.log_datefmt -# c.JupyterApp.log_datefmt = '%Y-%m-%d %H:%M:%S' - -## The Logging format template -# See also: Application.log_format -# c.JupyterApp.log_format = '[%(name)s]%(highlevel)s %(message)s' - -## Set the log level by value or name. 
-# See also: Application.log_level -# c.JupyterApp.log_level = 30 - -## -# See also: Application.logging_config -# c.JupyterApp.logging_config = {} - -## Instead of starting the Application, dump configuration to stdout -# See also: Application.show_config -# c.JupyterApp.show_config = False - -## Instead of starting the Application, dump configuration to stdout (as JSON) -# See also: Application.show_config_json -# c.JupyterApp.show_config_json = False - -#------------------------------------------------------------------------------ -# NotebookApp(JupyterApp) configuration -#------------------------------------------------------------------------------ -## Set the Access-Control-Allow-Credentials: true header -# Default: False -# c.NotebookApp.allow_credentials = False - -## Set the Access-Control-Allow-Origin header -# -# Use '*' to allow any origin to access your server. -# -# Takes precedence over allow_origin_pat. -# Default: '' -# c.NotebookApp.allow_origin = '' - -## Use a regular expression for the Access-Control-Allow-Origin header -# -# Requests from an origin matching the expression will get replies with: -# -# Access-Control-Allow-Origin: origin -# -# where `origin` is the origin of the request. -# -# Ignored if allow_origin is set. -# Default: '' -# c.NotebookApp.allow_origin_pat = '' - -## Allow password to be changed at login for the notebook server. -# -# While logging in with a token, the notebook server UI will give the opportunity to -# the user to enter a new password at the same time that will replace -# the token login mechanism. -# -# This can be set to false to prevent changing password from -# the UI/API. -# Default: True -# c.NotebookApp.allow_password_change = True - -## Allow requests where the Host header doesn't point to a local server -# -# By default, requests get a 403 forbidden response if the 'Host' header -# shows that the browser thinks it's on a non-local domain. -# Setting this option to True disables this check. 
-# -# This protects against 'DNS rebinding' attacks, where a remote web server -# serves you a page and then changes its DNS to send later requests to a -# local IP, bypassing same-origin checks. -# -# Local IP addresses (such as 127.0.0.1 and ::1) are allowed as local, -# along with hostnames configured in local_hostnames. -# Default: False -# c.NotebookApp.allow_remote_access = False - -## Whether to allow the user to run the notebook as root. -# Default: False -# c.NotebookApp.allow_root = False - -## Answer yes to any prompts. -# See also: JupyterApp.answer_yes -# c.NotebookApp.answer_yes = False - -## " -# Require authentication to access prometheus metrics. -# Default: True -# c.NotebookApp.authenticate_prometheus = True - -## Reload the webapp when changes are made to any Python src files. -# Default: False -# c.NotebookApp.autoreload = False - -## DEPRECATED use base_url -# Default: '/' -# c.NotebookApp.base_project_url = '/' - -## The base URL for the notebook server. -# -# Leading and trailing slashes can be omitted, -# and will automatically be added. -# Default: '/' -# c.NotebookApp.base_url = '/' - -## Specify what command to use to invoke a web -# browser when opening the notebook. If not specified, the -# default browser will be determined by the `webbrowser` -# standard library module, which allows setting of the -# BROWSER environment variable to override it. -# Default: '' -# c.NotebookApp.browser = '' - -## The full path to an SSL/TLS certificate file. -# Default: '' -# c.NotebookApp.certfile = '' - -## The full path to a certificate authority certificate for SSL/TLS client -# authentication. -# Default: '' -# c.NotebookApp.client_ca = '' - -## Full path of a config file. -# See also: JupyterApp.config_file -# c.NotebookApp.config_file = '' - -## Specify a config file to load. 
-# See also: JupyterApp.config_file_name -# c.NotebookApp.config_file_name = '' - -## The config manager class to use -# Default: 'notebook.services.config.manager.ConfigManager' -# c.NotebookApp.config_manager_class = 'notebook.services.config.manager.ConfigManager' - -## The notebook manager class to use. -# Default: 'notebook.services.contents.largefilemanager.LargeFileManager' -# c.NotebookApp.contents_manager_class = 'notebook.services.contents.largefilemanager.LargeFileManager' - -## Extra keyword arguments to pass to `set_secure_cookie`. See tornado's -# set_secure_cookie docs for details. -# Default: {} -# c.NotebookApp.cookie_options = {} - -## The random bytes used to secure cookies. -# By default this is a new random number every time you start the Notebook. -# Set it to a value in a config file to enable logins to persist across server sessions. -# -# Note: Cookie secrets should be kept private, do not share config files with -# cookie_secret stored in plaintext (you can read the value from a file). -# Default: b'' -# c.NotebookApp.cookie_secret = b'' - -## The file where the cookie secret is stored. -# Default: '' -# c.NotebookApp.cookie_secret_file = '' - -## Override URL shown to users. -# -# Replace actual URL, including protocol, address, port and base URL, -# with the given value when displaying URL to the users. Do not change -# the actual connection URL. If authentication token is enabled, the -# token is added to the custom URL automatically. -# -# This option is intended to be used when the URL to display to the user -# cannot be determined reliably by the Jupyter notebook server (proxified -# or containerized setups for example). 
-# Default: '' -# c.NotebookApp.custom_display_url = '' - -## The default URL to redirect to from `/` -# Default: '/tree' -# c.NotebookApp.default_url = '/tree' - -## Disable cross-site-request-forgery protection -# -# Jupyter notebook 4.3.1 introduces protection from cross-site request forgeries, -# requiring API requests to either: -# -# - originate from pages served by this server (validated with XSRF cookie and token), or -# - authenticate with a token -# -# Some anonymous compute resources still desire the ability to run code, -# completely without authentication. -# These services can disable all authentication and security checks, -# with the full knowledge of what that implies. -# Default: False -# c.NotebookApp.disable_check_xsrf = False - -## Whether to enable MathJax for typesetting math/TeX -# -# MathJax is the javascript library Jupyter uses to render math/LaTeX. It is -# very large, so you may want to disable it if you have a slow internet -# connection, or for offline use of the notebook. -# -# When disabled, equations etc. will appear as their untransformed TeX -# source. -# Default: True -# c.NotebookApp.enable_mathjax = True - -## extra paths to look for Javascript notebook extensions -# Default: [] -# c.NotebookApp.extra_nbextensions_path = [] - -## handlers that should be loaded at higher priority than the default services -# Default: [] -# c.NotebookApp.extra_services = [] - -## Extra paths to search for serving static files. -# -# This allows adding javascript/css to be available from the notebook server machine, -# or overriding individual files in the IPython -# Default: [] -# c.NotebookApp.extra_static_paths = [] - -## Extra paths to search for serving jinja templates. -# -# Can be used to override templates from notebook.templates. -# Default: [] -# c.NotebookApp.extra_template_paths = [] - -# Default: '' -# c.NotebookApp.file_to_run = '' - -## Generate default config file. 
-# See also: JupyterApp.generate_config -# c.NotebookApp.generate_config = False - -## Extra keyword arguments to pass to `get_secure_cookie`. See tornado's -# get_secure_cookie docs for details. -# Default: {} -# c.NotebookApp.get_secure_cookie_kwargs = {} - -## Deprecated: Use minified JS file or not, mainly use during dev to avoid JS -# recompilation -# Default: False -# c.NotebookApp.ignore_minified_js = False - -## (bytes/sec) -# Maximum rate at which stream output can be sent on iopub before they are -# limited. -# Default: 1000000 -# c.NotebookApp.iopub_data_rate_limit = 1000000 - -## (msgs/sec) -# Maximum rate at which messages can be sent on iopub before they are -# limited. -# Default: 1000 -# c.NotebookApp.iopub_msg_rate_limit = 1000 - -## The IP address the notebook server will listen on. -# Default: 'localhost' -# c.NotebookApp.ip = 'localhost' - -## Supply extra arguments that will be passed to Jinja environment. -# Default: {} -# c.NotebookApp.jinja_environment_options = {} - -## Extra variables to supply to jinja templates when rendering. -# Default: {} -# c.NotebookApp.jinja_template_vars = {} - -## The kernel manager class to use. -# Default: 'notebook.services.kernels.kernelmanager.MappingKernelManager' -# c.NotebookApp.kernel_manager_class = 'notebook.services.kernels.kernelmanager.MappingKernelManager' - -## The kernel spec manager class to use. Should be a subclass of -# `jupyter_client.kernelspec.KernelSpecManager`. -# -# The Api of KernelSpecManager is provisional and might change without warning -# between this version of Jupyter and the next stable one. -# Default: 'jupyter_client.kernelspec.KernelSpecManager' -# c.NotebookApp.kernel_spec_manager_class = 'jupyter_client.kernelspec.KernelSpecManager' - -## The full path to a private key file for usage with SSL/TLS. -# Default: '' -# c.NotebookApp.keyfile = '' - -## Hostnames to allow as local when allow_remote_access is False. 
-# -# Local IP addresses (such as 127.0.0.1 and ::1) are automatically accepted -# as local as well. -# Default: ['localhost'] -# c.NotebookApp.local_hostnames = ['localhost'] - -## The date format used by logging formatters for %(asctime)s -# See also: Application.log_datefmt -# c.NotebookApp.log_datefmt = '%Y-%m-%d %H:%M:%S' - -## The Logging format template -# See also: Application.log_format -# c.NotebookApp.log_format = '[%(name)s]%(highlevel)s %(message)s' - -## Set to True to enable JSON formatted logs. Run "pip install notebook[json- -# logging]" to install the required dependent packages. Can also be set using -# the environment variable JUPYTER_ENABLE_JSON_LOGGING=true. -# Default: False -# c.NotebookApp.log_json = False - -## Set the log level by value or name. -# See also: Application.log_level -# c.NotebookApp.log_level = 30 - -## -# See also: Application.logging_config -# c.NotebookApp.logging_config = {} - -## The login handler class to use. -# Default: 'notebook.auth.login.LoginHandler' -# c.NotebookApp.login_handler_class = 'notebook.auth.login.LoginHandler' - -## The logout handler class to use. -# Default: 'notebook.auth.logout.LogoutHandler' -# c.NotebookApp.logout_handler_class = 'notebook.auth.logout.LogoutHandler' - -## The MathJax.js configuration file that is to be used. -# Default: 'TeX-AMS-MML_HTMLorMML-full,Safe' -# c.NotebookApp.mathjax_config = 'TeX-AMS-MML_HTMLorMML-full,Safe' - -## A custom url for MathJax.js. -# Should be in the form of a case-sensitive url to MathJax, -# for example: /static/components/MathJax/MathJax.js -# Default: '' -# c.NotebookApp.mathjax_url = '' - -## Sets the maximum allowed size of the client request body, specified in the -# Content-Length request header field. If the size in a request exceeds the -# configured value, a malformed HTTP message is returned to the client. -# -# Note: max_body_size is applied even in streaming mode. 
-# Default: 536870912 -# c.NotebookApp.max_body_size = 536870912 - -## Gets or sets the maximum amount of memory, in bytes, that is allocated for use -# by the buffer manager. -# Default: 536870912 -# c.NotebookApp.max_buffer_size = 536870912 - -## Gets or sets a lower bound on the open file handles process resource limit. -# This may need to be increased if you run into an OSError: [Errno 24] Too many -# open files. This is not applicable when running on Windows. -# Default: 0 -# c.NotebookApp.min_open_files_limit = 0 - -## Dict of Python modules to load as notebook server extensions. Entry values can -# be used to enable and disable the loading of the extensions. The extensions -# will be loaded in alphabetical order. -# Default: {} -# c.NotebookApp.nbserver_extensions = {} - -## The directory to use for notebooks and kernels. -# Default: '' -# c.NotebookApp.notebook_dir = '' - -## Whether to open in a browser after starting. -# The specific browser used is platform dependent and -# determined by the python standard library `webbrowser` -# module, unless it is overridden using the --browser -# (NotebookApp.browser) configuration option. -# Default: True -# c.NotebookApp.open_browser = True - -## Hashed password to use for web authentication. -# -# To generate, type in a python/IPython shell: -# -# from notebook.auth import passwd; passwd() -# -# The string should be of the form type:salt:hashed- -# password. -# Default: '' -# c.NotebookApp.password = '' - -## Forces users to use a password for the Notebook server. -# This is useful in a multi user environment, for instance when -# everybody in the LAN can access each other's machine through ssh. -# -# In such a case, serving the notebook server on localhost is not secure -# since any user can connect to the notebook server via ssh. -# Default: False -# c.NotebookApp.password_required = False - -## The port the notebook server will listen on (env: JUPYTER_PORT). 
-# Default: 8888 -# c.NotebookApp.port = 8888 - -## The number of additional ports to try if the specified port is not available -# (env: JUPYTER_PORT_RETRIES). -# Default: 50 -# c.NotebookApp.port_retries = 50 - -## DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib. -# Default: 'disabled' -# c.NotebookApp.pylab = 'disabled' - -## If True, display a button in the dashboard to quit -# (shutdown the notebook server). -# Default: True -# c.NotebookApp.quit_button = True - -## (sec) Time window used to -# check the message and data rate limits. -# Default: 3 -# c.NotebookApp.rate_limit_window = 3 - -## Reraise exceptions encountered loading server extensions? -# Default: False -# c.NotebookApp.reraise_server_extension_failures = False - -## DEPRECATED use the nbserver_extensions dict instead -# Default: [] -# c.NotebookApp.server_extensions = [] - -## The session manager class to use. -# Default: 'notebook.services.sessions.sessionmanager.SessionManager' -# c.NotebookApp.session_manager_class = 'notebook.services.sessions.sessionmanager.SessionManager' - -## Instead of starting the Application, dump configuration to stdout -# See also: Application.show_config -# c.NotebookApp.show_config = False - -## Instead of starting the Application, dump configuration to stdout (as JSON) -# See also: Application.show_config_json -# c.NotebookApp.show_config_json = False - -## Shut down the server after N seconds with no kernels or terminals running and -# no activity. This can be used together with culling idle kernels -# (MappingKernelManager.cull_idle_timeout) to shutdown the notebook server when -# it's not in use. This is not precisely timed: it may shut down up to a minute -# later. 0 (the default) disables this automatic shutdown. -# Default: 0 -# c.NotebookApp.shutdown_no_activity_timeout = 0 - -## The UNIX socket the notebook server will listen on. 
-# Default: '' -# c.NotebookApp.sock = '' - -## The permissions mode for UNIX socket creation (default: 0600). -# Default: '0600' -# c.NotebookApp.sock_mode = '0600' - -## Supply SSL options for the tornado HTTPServer. -# See the tornado docs for details. -# Default: {} -# c.NotebookApp.ssl_options = {} - -## Supply overrides for terminado. Currently only supports "shell_command". On -# Unix, if "shell_command" is not provided, a non-login shell is launched by -# default when the notebook server is connected to a terminal, a login shell -# otherwise. -# Default: {} -# c.NotebookApp.terminado_settings = {} - -## Set to False to disable terminals. -# -# This does *not* make the notebook server more secure by itself. -# Anything the user can in a terminal, they can also do in a notebook. -# -# Terminals may also be automatically disabled if the terminado package -# is not available. -# Default: True -# c.NotebookApp.terminals_enabled = True - -## Token used for authenticating first-time connections to the server. -# -# The token can be read from the file referenced by JUPYTER_TOKEN_FILE or set directly -# with the JUPYTER_TOKEN environment variable. -# -# When no password is enabled, -# the default is to generate a new, random token. -# -# Setting to an empty string disables authentication altogether, which -# is NOT RECOMMENDED. -# Default: '' -# c.NotebookApp.token = '' - -## Supply overrides for the tornado.web.Application that the Jupyter notebook -# uses. -# Default: {} -# c.NotebookApp.tornado_settings = {} - -## Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded- -# For headers sent by the upstream reverse proxy. Necessary if the proxy handles -# SSL -# Default: False -# c.NotebookApp.trust_xheaders = False - -## Disable launching browser by redirect file -# -# For versions of notebook > 5.7.2, a security feature measure was added that -# prevented the authentication token used to launch the browser from being visible. 
-# This feature makes it difficult for other users on a multi-user system from -# running code in your Jupyter session as you. -# -# However, some environments (like Windows Subsystem for Linux (WSL) and Chromebooks), -# launching a browser using a redirect file can lead the browser failing to load. -# This is because of the difference in file structures/paths between the runtime and -# the browser. -# -# Disabling this setting to False will disable this behavior, allowing the browser -# to launch by using a URL and visible token (as before). -# Default: True -# c.NotebookApp.use_redirect_file = True - -## DEPRECATED, use tornado_settings -# Default: {} -# c.NotebookApp.webapp_settings = {} - -## Specify Where to open the notebook on startup. This is the -# `new` argument passed to the standard library method `webbrowser.open`. -# The behaviour is not guaranteed, but depends on browser support. Valid -# values are: -# -# - 2 opens a new tab, -# - 1 opens a new window, -# - 0 opens in an existing window. -# -# See the `webbrowser.open` documentation for details. -# Default: 2 -# c.NotebookApp.webbrowser_open_new = 2 - -## Set the tornado compression options for websocket connections. -# -# This value will be returned from -# :meth:`WebSocketHandler.get_compression_options`. None (default) will disable -# compression. A dict (even an empty one) will enable compression. -# -# See the tornado docs for WebSocketHandler.get_compression_options for details. -# Default: None -# c.NotebookApp.websocket_compression_options = None - -## The base URL for websockets, -# if it differs from the HTTP server (hint: it almost certainly doesn't). 
-# -# Should be in the form of an HTTP origin: ws[s]://hostname[:port] -# Default: '' -# c.NotebookApp.websocket_url = '' - -#------------------------------------------------------------------------------ -# ConnectionFileMixin(LoggingConfigurable) configuration -#------------------------------------------------------------------------------ -## Mixin for configurable classes that work with connection files - -## JSON file in which to store connection info [default: kernel-.json] -# -# This file will contain the IP, ports, and authentication key needed to connect -# clients to this kernel. By default, this file will be created in the security dir -# of the current profile, but can be specified by absolute path. -# Default: '' -# c.ConnectionFileMixin.connection_file = '' - -## set the control (ROUTER) port [default: random] -# Default: 0 -# c.ConnectionFileMixin.control_port = 0 - -## set the heartbeat port [default: random] -# Default: 0 -# c.ConnectionFileMixin.hb_port = 0 - -## set the iopub (PUB) port [default: random] -# Default: 0 -# c.ConnectionFileMixin.iopub_port = 0 - -## Set the kernel's IP address [default localhost]. -# If the IP address is something other than localhost, then -# Consoles on other machines will be able to connect -# to the Kernel, so be careful! -# Default: '' -# c.ConnectionFileMixin.ip = '' - -## set the shell (ROUTER) port [default: random] -# Default: 0 -# c.ConnectionFileMixin.shell_port = 0 - -## set the stdin (ROUTER) port [default: random] -# Default: 0 -# c.ConnectionFileMixin.stdin_port = 0 - -# Choices: any of ['tcp', 'ipc'] (case-insensitive) -# Default: 'tcp' -# c.ConnectionFileMixin.transport = 'tcp' - -#------------------------------------------------------------------------------ -# KernelManager(ConnectionFileMixin) configuration -#------------------------------------------------------------------------------ -## Manages a single kernel in a subprocess on this host. -# -# This version starts kernels with Popen. 
- -## Should we autorestart the kernel if it dies. -# Default: True -# c.KernelManager.autorestart = True - -## JSON file in which to store connection info [default: kernel-.json] -# See also: ConnectionFileMixin.connection_file -# c.KernelManager.connection_file = '' - -## set the control (ROUTER) port [default: random] -# See also: ConnectionFileMixin.control_port -# c.KernelManager.control_port = 0 - -## set the heartbeat port [default: random] -# See also: ConnectionFileMixin.hb_port -# c.KernelManager.hb_port = 0 - -## set the iopub (PUB) port [default: random] -# See also: ConnectionFileMixin.iopub_port -# c.KernelManager.iopub_port = 0 - -## Set the kernel's IP address [default localhost]. -# See also: ConnectionFileMixin.ip -# c.KernelManager.ip = '' - -## set the shell (ROUTER) port [default: random] -# See also: ConnectionFileMixin.shell_port -# c.KernelManager.shell_port = 0 - -## Time to wait for a kernel to terminate before killing it, in seconds. When a -# shutdown request is initiated, the kernel will be immediately sent an -# interrupt (SIGINT), followedby a shutdown_request message, after 1/2 of -# `shutdown_wait_time`it will be sent a terminate (SIGTERM) request, and finally -# at the end of `shutdown_wait_time` will be killed (SIGKILL). terminate and -# kill may be equivalent on windows. Note that this value can beoverridden by -# the in-use kernel provisioner since shutdown times mayvary by provisioned -# environment. -# Default: 5.0 -# c.KernelManager.shutdown_wait_time = 5.0 - -## set the stdin (ROUTER) port [default: random] -# See also: ConnectionFileMixin.stdin_port -# c.KernelManager.stdin_port = 0 - -# See also: ConnectionFileMixin.transport -# c.KernelManager.transport = 'tcp' - -#------------------------------------------------------------------------------ -# Session(Configurable) configuration -#------------------------------------------------------------------------------ -## Object for handling serialization and sending of messages. 
-# -# The Session object handles building messages and sending them -# with ZMQ sockets or ZMQStream objects. Objects can communicate with each -# other over the network via Session objects, and only need to work with the -# dict-based IPython message spec. The Session will handle -# serialization/deserialization, security, and metadata. -# -# Sessions support configurable serialization via packer/unpacker traits, -# and signing with HMAC digests via the key/keyfile traits. -# -# Parameters -# ---------- -# -# debug : bool -# whether to trigger extra debugging statements -# packer/unpacker : str : 'json', 'pickle' or import_string -# importstrings for methods to serialize message parts. If just -# 'json' or 'pickle', predefined JSON and pickle packers will be used. -# Otherwise, the entire importstring must be used. -# -# The functions must accept at least valid JSON input, and output -# *bytes*. -# -# For example, to use msgpack: -# packer = 'msgpack.packb', unpacker='msgpack.unpackb' -# pack/unpack : callables -# You can also set the pack/unpack callables for serialization directly. -# session : bytes -# the ID of this Session object. The default is to generate a new UUID. -# username : unicode -# username added to message headers. The default is to ask the OS. -# key : bytes -# The key used to initialize an HMAC signature. If unset, messages -# will not be signed or checked. -# keyfile : filepath -# The file containing a key. If this is set, `key` will be initialized -# to the contents of the file. - -## Threshold (in bytes) beyond which an object's buffer should be extracted to -# avoid pickling. -# Default: 1024 -# c.Session.buffer_threshold = 1024 - -## Whether to check PID to protect against calls after fork. -# -# This check can be disabled if fork-safety is handled elsewhere. -# Default: True -# c.Session.check_pid = True - -## Threshold (in bytes) beyond which a buffer should be sent without copying. 
-# Default: 65536 -# c.Session.copy_threshold = 65536 - -## Debug output in the Session -# Default: False -# c.Session.debug = False - -## The maximum number of digests to remember. -# -# The digest history will be culled when it exceeds this value. -# Default: 65536 -# c.Session.digest_history_size = 65536 - -## The maximum number of items for a container to be introspected for custom serialization. -# Containers larger than this are pickled outright. -# Default: 64 -# c.Session.item_threshold = 64 - -## execution key, for signing messages. -# Default: b'' -# c.Session.key = b'' - -## path to file containing execution key. -# Default: '' -# c.Session.keyfile = '' - -## Metadata dictionary, which serves as the default top-level metadata dict for -# each message. -# Default: {} -# c.Session.metadata = {} - -## The name of the packer for serializing messages. -# Should be one of 'json', 'pickle', or an import name -# for a custom callable serializer. -# Default: 'json' -# c.Session.packer = 'json' - -## The UUID identifying this session. -# Default: '' -# c.Session.session = '' - -## The digest scheme used to construct the message signatures. -# Must have the form 'hmac-HASH'. -# Default: 'hmac-sha256' -# c.Session.signature_scheme = 'hmac-sha256' - -## The name of the unpacker for unserializing messages. -# Only used with custom functions for `packer`. -# Default: 'json' -# c.Session.unpacker = 'json' - -## Username for the Session. Default is your system username. -# Default: 'builder' -# c.Session.username = 'builder' - -#------------------------------------------------------------------------------ -# MultiKernelManager(LoggingConfigurable) configuration -#------------------------------------------------------------------------------ -## A class for managing multiple kernels. - -## The name of the default kernel to start -# Default: 'python3' -# c.MultiKernelManager.default_kernel_name = 'python3' - -## The kernel manager class. 
This is configurable to allow -# subclassing of the KernelManager for customized behavior. -# Default: 'jupyter_client.ioloop.IOLoopKernelManager' -# c.MultiKernelManager.kernel_manager_class = 'jupyter_client.ioloop.IOLoopKernelManager' - -## Share a single zmq.Context to talk to all my kernels -# Default: True -# c.MultiKernelManager.shared_context = True - -#------------------------------------------------------------------------------ -# MappingKernelManager(MultiKernelManager) configuration -#------------------------------------------------------------------------------ -## A KernelManager that handles notebook mapping and HTTP error handling - -## White list of allowed kernel message types. -# When the list is empty, all message types are allowed. -# Default: [] -# c.MappingKernelManager.allowed_message_types = [] - -## Whether messages from kernels whose frontends have disconnected should be buffered in-memory. -# When True (default), messages are buffered and replayed on reconnect, -# avoiding lost messages due to interrupted connectivity. -# Disable if long-running kernels will produce too much output while -# no frontends are connected. -# Default: True -# c.MappingKernelManager.buffer_offline_messages = True - -## Whether to consider culling kernels which are busy. -# Only effective if cull_idle_timeout > 0. -# Default: False -# c.MappingKernelManager.cull_busy = False - -## Whether to consider culling kernels which have one or more connections. -# Only effective if cull_idle_timeout > 0. -# Default: False -# c.MappingKernelManager.cull_connected = False - -## Timeout (in seconds) after which a kernel is considered idle and ready to be culled. -# Values of 0 or lower disable culling. Very short timeouts may result in kernels being culled -# for users with poor network connections. -# Default: 0 -# c.MappingKernelManager.cull_idle_timeout = 0 - -## The interval (in seconds) on which to check for idle kernels exceeding the -# cull timeout value. 
-# Default: 300 -# c.MappingKernelManager.cull_interval = 300 - -## The name of the default kernel to start -# See also: MultiKernelManager.default_kernel_name -# c.MappingKernelManager.default_kernel_name = 'python3' - -## Timeout for giving up on a kernel (in seconds). -# On starting and restarting kernels, we check whether the -# kernel is running and responsive by sending kernel_info_requests. -# This sets the timeout in seconds for how long the kernel can take -# before being presumed dead. -# This affects the MappingKernelManager (which handles kernel restarts) -# and the ZMQChannelsHandler (which handles the startup). -# Default: 60 -# c.MappingKernelManager.kernel_info_timeout = 60 - -## The kernel manager class. This is configurable to allow -# See also: MultiKernelManager.kernel_manager_class -# c.MappingKernelManager.kernel_manager_class = 'jupyter_client.ioloop.IOLoopKernelManager' - -# Default: '' -# c.MappingKernelManager.root_dir = '' - -## Share a single zmq.Context to talk to all my kernels -# See also: MultiKernelManager.shared_context -# c.MappingKernelManager.shared_context = True - -#------------------------------------------------------------------------------ -# KernelSpecManager(LoggingConfigurable) configuration -#------------------------------------------------------------------------------ -## List of allowed kernel names. -# -# By default, all installed kernels are allowed. -# Default: set() -# c.KernelSpecManager.allowed_kernelspecs = set() - -## If there is no Python kernelspec registered and the IPython -# kernel is available, ensure it is added to the spec list. -# Default: True -# c.KernelSpecManager.ensure_native_kernel = True - -## The kernel spec class. This is configurable to allow -# subclassing of the KernelSpecManager for customized behavior. 
-# Default: 'jupyter_client.kernelspec.KernelSpec' -# c.KernelSpecManager.kernel_spec_class = 'jupyter_client.kernelspec.KernelSpec' - -## Deprecated, use `KernelSpecManager.allowed_kernelspecs` -# Default: set() -# c.KernelSpecManager.whitelist = set() - -#------------------------------------------------------------------------------ -# ContentsManager(LoggingConfigurable) configuration -#------------------------------------------------------------------------------ -## Base class for serving files and directories. -# -# This serves any text or binary file, -# as well as directories, -# with special handling for JSON notebook documents. -# -# Most APIs take a path argument, -# which is always an API-style unicode path, -# and always refers to a directory. -# -# - unicode, not url-escaped -# - '/'-separated -# - leading and trailing '/' will be stripped -# - if unspecified, path defaults to '', -# indicating the root path. - -## Allow access to hidden files -# Default: False -# c.ContentsManager.allow_hidden = False - -# Default: None -# c.ContentsManager.checkpoints = None - -# Default: 'notebook.services.contents.checkpoints.Checkpoints' -# c.ContentsManager.checkpoints_class = 'notebook.services.contents.checkpoints.Checkpoints' - -# Default: {} -# c.ContentsManager.checkpoints_kwargs = {} - -## handler class to use when serving raw file requests. -# -# Default is a fallback that talks to the ContentsManager API, -# which may be inefficient, especially for large files. -# -# Local files-based ContentsManagers can use a StaticFileHandler subclass, -# which will be much more efficient. -# -# Access to these files should be Authenticated. -# Default: 'notebook.files.handlers.FilesHandler' -# c.ContentsManager.files_handler_class = 'notebook.files.handlers.FilesHandler' - -## Extra parameters to pass to files_handler_class. -# -# For example, StaticFileHandlers generally expect a `path` argument -# specifying the root directory from which to serve files. 
-# Default: {} -# c.ContentsManager.files_handler_params = {} - -## Glob patterns to hide in file and directory listings. -# Default: ['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*.so', '*.dylib', '*~'] -# c.ContentsManager.hide_globs = ['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*.so', '*.dylib', '*~'] - -## Python callable or importstring thereof -# -# To be called on a contents model prior to save. -# -# This can be used to process the structure, -# such as removing notebook outputs or other side effects that -# should not be saved. -# -# It will be called as (all arguments passed by keyword):: -# -# hook(path=path, model=model, contents_manager=self) -# -# - model: the model to be saved. Includes file contents. -# Modifying this dict will affect the file that is stored. -# - path: the API path of the save destination -# - contents_manager: this ContentsManager instance -# Default: None -# c.ContentsManager.pre_save_hook = None - -# Default: '/' -# c.ContentsManager.root_dir = '/' - -## The base name used when creating untitled directories. -# Default: 'Untitled Folder' -# c.ContentsManager.untitled_directory = 'Untitled Folder' - -## The base name used when creating untitled files. -# Default: 'untitled' -# c.ContentsManager.untitled_file = 'untitled' - -## The base name used when creating untitled notebooks. -# Default: 'Untitled' -# c.ContentsManager.untitled_notebook = 'Untitled' - -#------------------------------------------------------------------------------ -# FileManagerMixin(Configurable) configuration -#------------------------------------------------------------------------------ -## Mixin for ContentsAPI classes that interact with the filesystem. -# -# Provides facilities for reading, writing, and copying both notebooks and -# generic files. -# -# Shared by FileContentsManager and FileCheckpoints. 
-# -# Note ---- Classes using this mixin must provide the following attributes: -# -# root_dir : unicode -# A directory against which API-style paths are to be resolved. -# -# log : logging.Logger - -## By default notebooks are saved on disk on a temporary file and then if successfully written, it replaces the old ones. -# This procedure, namely 'atomic_writing', causes some bugs on file system without operation order enforcement (like some networked fs). -# If set to False, the new notebook is written directly on the old one which could fail (eg: full filesystem or quota ) -# Default: True -# c.FileManagerMixin.use_atomic_writing = True - -#------------------------------------------------------------------------------ -# FileContentsManager(FileManagerMixin, ContentsManager) configuration -#------------------------------------------------------------------------------ -## Allow access to hidden files -# See also: ContentsManager.allow_hidden -# c.FileContentsManager.allow_hidden = False - -# See also: ContentsManager.checkpoints -# c.FileContentsManager.checkpoints = None - -# See also: ContentsManager.checkpoints_class -# c.FileContentsManager.checkpoints_class = 'notebook.services.contents.checkpoints.Checkpoints' - -# See also: ContentsManager.checkpoints_kwargs -# c.FileContentsManager.checkpoints_kwargs = {} - -## If True (default), deleting files will send them to the -# platform's trash/recycle bin, where they can be recovered. If False, -# deleting files really deletes them. -# Default: True -# c.FileContentsManager.delete_to_trash = True - -## handler class to use when serving raw file requests. -# See also: ContentsManager.files_handler_class -# c.FileContentsManager.files_handler_class = 'notebook.files.handlers.FilesHandler' - -## Extra parameters to pass to files_handler_class. 
-# See also: ContentsManager.files_handler_params -# c.FileContentsManager.files_handler_params = {} - -## -# See also: ContentsManager.hide_globs -# c.FileContentsManager.hide_globs = ['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*.so', '*.dylib', '*~'] - -## Python callable or importstring thereof -# -# to be called on the path of a file just saved. -# -# This can be used to process the file on disk, -# such as converting the notebook to a script or HTML via nbconvert. -# -# It will be called as (all arguments passed by keyword):: -# -# hook(os_path=os_path, model=model, contents_manager=instance) -# -# - path: the filesystem path to the file just written -# - model: the model representing the file -# - contents_manager: this ContentsManager instance -# Default: None -# c.FileContentsManager.post_save_hook = None - -## Python callable or importstring thereof -# See also: ContentsManager.pre_save_hook -# c.FileContentsManager.pre_save_hook = None - -# Default: '' -# c.FileContentsManager.root_dir = '' - -## DEPRECATED, use post_save_hook. Will be removed in Notebook 5.0 -# Default: False -# c.FileContentsManager.save_script = False - -## The base name used when creating untitled directories. -# See also: ContentsManager.untitled_directory -# c.FileContentsManager.untitled_directory = 'Untitled Folder' - -## The base name used when creating untitled files. -# See also: ContentsManager.untitled_file -# c.FileContentsManager.untitled_file = 'untitled' - -## The base name used when creating untitled notebooks. -# See also: ContentsManager.untitled_notebook -# c.FileContentsManager.untitled_notebook = 'Untitled' - -## By default notebooks are saved on disk on a temporary file and then if -# successfully written, it replaces the old ones. 
-# See also: FileManagerMixin.use_atomic_writing -# c.FileContentsManager.use_atomic_writing = True - -#------------------------------------------------------------------------------ -# NotebookNotary(LoggingConfigurable) configuration -#------------------------------------------------------------------------------ -## A class for computing and verifying notebook signatures. - -## The hashing algorithm used to sign notebooks. -# Choices: any of ['sha3_384', 'blake2b', 'sha1', 'md5', 'sha3_224', 'sha3_256', 'sha3_512', 'sha224', 'sha384', 'sha256', 'blake2s', 'sha512'] -# Default: 'sha256' -# c.NotebookNotary.algorithm = 'sha256' - -## The storage directory for notary secret and database. -# Default: '' -# c.NotebookNotary.data_dir = '' - -## The sqlite file in which to store notebook signatures. -# By default, this will be in your Jupyter data directory. -# You can set it to ':memory:' to disable sqlite writing to the filesystem. -# Default: '' -# c.NotebookNotary.db_file = '' - -## The secret key with which notebooks are signed. -# Default: b'' -# c.NotebookNotary.secret = b'' - -## The file where the secret key is stored. -# Default: '' -# c.NotebookNotary.secret_file = '' - -## A callable returning the storage backend for notebook signatures. -# The default uses an SQLite database. -# Default: traitlets.Undefined -# c.NotebookNotary.store_factory = traitlets.Undefined - -#------------------------------------------------------------------------------ -# AsyncMultiKernelManager(MultiKernelManager) configuration -#------------------------------------------------------------------------------ -## The name of the default kernel to start -# See also: MultiKernelManager.default_kernel_name -# c.AsyncMultiKernelManager.default_kernel_name = 'python3' - -## The kernel manager class. This is configurable to allow -# subclassing of the AsyncKernelManager for customized behavior. 
-# Default: 'jupyter_client.ioloop.AsyncIOLoopKernelManager' -# c.AsyncMultiKernelManager.kernel_manager_class = 'jupyter_client.ioloop.AsyncIOLoopKernelManager' - -## Share a single zmq.Context to talk to all my kernels -# See also: MultiKernelManager.shared_context -# c.AsyncMultiKernelManager.shared_context = True - -## Whether to make kernels available before the process has started. The -# kernel has a `.ready` future which can be awaited before connecting -# Default: False -# c.AsyncMultiKernelManager.use_pending_kernels = False - -#------------------------------------------------------------------------------ -# AsyncMappingKernelManager(MappingKernelManager, AsyncMultiKernelManager) configuration -#------------------------------------------------------------------------------ -## White list of allowed kernel message types. -# See also: MappingKernelManager.allowed_message_types -# c.AsyncMappingKernelManager.allowed_message_types = [] - -## Whether messages from kernels whose frontends have disconnected should be -# buffered in-memory. -# See also: MappingKernelManager.buffer_offline_messages -# c.AsyncMappingKernelManager.buffer_offline_messages = True - -## Whether to consider culling kernels which are busy. -# See also: MappingKernelManager.cull_busy -# c.AsyncMappingKernelManager.cull_busy = False - -## Whether to consider culling kernels which have one or more connections. -# See also: MappingKernelManager.cull_connected -# c.AsyncMappingKernelManager.cull_connected = False - -## Timeout (in seconds) after which a kernel is considered idle and ready to be -# culled. -# See also: MappingKernelManager.cull_idle_timeout -# c.AsyncMappingKernelManager.cull_idle_timeout = 0 - -## The interval (in seconds) on which to check for idle kernels exceeding the -# cull timeout value. 
-# See also: MappingKernelManager.cull_interval -# c.AsyncMappingKernelManager.cull_interval = 300 - -## The name of the default kernel to start -# See also: MultiKernelManager.default_kernel_name -# c.AsyncMappingKernelManager.default_kernel_name = 'python3' - -## Timeout for giving up on a kernel (in seconds). -# See also: MappingKernelManager.kernel_info_timeout -# c.AsyncMappingKernelManager.kernel_info_timeout = 60 - -## The kernel manager class. This is configurable to allow -# See also: AsyncMultiKernelManager.kernel_manager_class -# c.AsyncMappingKernelManager.kernel_manager_class = 'jupyter_client.ioloop.AsyncIOLoopKernelManager' - -# See also: MappingKernelManager.root_dir -# c.AsyncMappingKernelManager.root_dir = '' - -## Share a single zmq.Context to talk to all my kernels -# See also: MultiKernelManager.shared_context -# c.AsyncMappingKernelManager.shared_context = True - -## Whether to make kernels available before the process has started. The -# See also: AsyncMultiKernelManager.use_pending_kernels -# c.AsyncMappingKernelManager.use_pending_kernels = False - -#------------------------------------------------------------------------------ -# GatewayKernelManager(AsyncMappingKernelManager) configuration -#------------------------------------------------------------------------------ -## Kernel manager that supports remote kernels hosted by Jupyter Kernel or -# Enterprise Gateway. - -## White list of allowed kernel message types. -# See also: MappingKernelManager.allowed_message_types -# c.GatewayKernelManager.allowed_message_types = [] - -## Whether messages from kernels whose frontends have disconnected should be -# buffered in-memory. -# See also: MappingKernelManager.buffer_offline_messages -# c.GatewayKernelManager.buffer_offline_messages = True - -## Whether to consider culling kernels which are busy. 
-# See also: MappingKernelManager.cull_busy -# c.GatewayKernelManager.cull_busy = False - -## Whether to consider culling kernels which have one or more connections. -# See also: MappingKernelManager.cull_connected -# c.GatewayKernelManager.cull_connected = False - -## Timeout (in seconds) after which a kernel is considered idle and ready to be -# culled. -# See also: MappingKernelManager.cull_idle_timeout -# c.GatewayKernelManager.cull_idle_timeout = 0 - -## The interval (in seconds) on which to check for idle kernels exceeding the -# cull timeout value. -# See also: MappingKernelManager.cull_interval -# c.GatewayKernelManager.cull_interval = 300 - -## The name of the default kernel to start -# See also: MultiKernelManager.default_kernel_name -# c.GatewayKernelManager.default_kernel_name = 'python3' - -## Timeout for giving up on a kernel (in seconds). -# See also: MappingKernelManager.kernel_info_timeout -# c.GatewayKernelManager.kernel_info_timeout = 60 - -## The kernel manager class. This is configurable to allow -# See also: AsyncMultiKernelManager.kernel_manager_class -# c.GatewayKernelManager.kernel_manager_class = 'jupyter_client.ioloop.AsyncIOLoopKernelManager' - -# See also: MappingKernelManager.root_dir -# c.GatewayKernelManager.root_dir = '' - -## Share a single zmq.Context to talk to all my kernels -# See also: MultiKernelManager.shared_context -# c.GatewayKernelManager.shared_context = True - -## Whether to make kernels available before the process has started. The -# See also: AsyncMultiKernelManager.use_pending_kernels -# c.GatewayKernelManager.use_pending_kernels = False - -#------------------------------------------------------------------------------ -# GatewayKernelSpecManager(KernelSpecManager) configuration -#------------------------------------------------------------------------------ -## List of allowed kernel names. 
-# See also: KernelSpecManager.allowed_kernelspecs -# c.GatewayKernelSpecManager.allowed_kernelspecs = set() - -## If there is no Python kernelspec registered and the IPython -# See also: KernelSpecManager.ensure_native_kernel -# c.GatewayKernelSpecManager.ensure_native_kernel = True - -## The kernel spec class. This is configurable to allow -# See also: KernelSpecManager.kernel_spec_class -# c.GatewayKernelSpecManager.kernel_spec_class = 'jupyter_client.kernelspec.KernelSpec' - -## Deprecated, use `KernelSpecManager.allowed_kernelspecs` -# See also: KernelSpecManager.whitelist -# c.GatewayKernelSpecManager.whitelist = set() - -#------------------------------------------------------------------------------ -# GatewayClient(SingletonConfigurable) configuration -#------------------------------------------------------------------------------ -## This class manages the configuration. It's its own singleton class so that we -# can share these values across all objects. It also contains some helper methods -# to build request arguments out of the various config options. - -## The authorization token used in the HTTP headers. (JUPYTER_GATEWAY_AUTH_TOKEN -# env var) -# Default: None -# c.GatewayClient.auth_token = None - -## The filename of CA certificates or None to use defaults. -# (JUPYTER_GATEWAY_CA_CERTS env var) -# Default: None -# c.GatewayClient.ca_certs = None - -## The filename for client SSL certificate, if any. (JUPYTER_GATEWAY_CLIENT_CERT -# env var) -# Default: None -# c.GatewayClient.client_cert = None - -## The filename for client SSL key, if any. (JUPYTER_GATEWAY_CLIENT_KEY env var) -# Default: None -# c.GatewayClient.client_key = None - -## The time allowed for HTTP connection establishment with the Gateway server. -# (JUPYTER_GATEWAY_CONNECT_TIMEOUT env var) -# Default: 40.0 -# c.GatewayClient.connect_timeout = 40.0 - -## A comma-separated list of environment variable names that will be included, along with -# their values, in the kernel startup request. 
The corresponding `env_whitelist` configuration -# value must also be set on the Gateway server - since that configuration value indicates which -# environmental values to make available to the kernel. (JUPYTER_GATEWAY_ENV_WHITELIST env var) -# Default: '' -# c.GatewayClient.env_whitelist = '' - -## The time allowed for HTTP reconnection with the Gateway server for the first time. -# Next will be JUPYTER_GATEWAY_RETRY_INTERVAL multiplied by two in factor of numbers of retries -# but less than JUPYTER_GATEWAY_RETRY_INTERVAL_MAX. -# (JUPYTER_GATEWAY_RETRY_INTERVAL env var) -# Default: 1.0 -# c.GatewayClient.gateway_retry_interval = 1.0 - -## The maximum time allowed for HTTP reconnection retry with the Gateway server. -# (JUPYTER_GATEWAY_RETRY_INTERVAL_MAX env var) -# Default: 30.0 -# c.GatewayClient.gateway_retry_interval_max = 30.0 - -## The maximum retries allowed for HTTP reconnection with the Gateway server. -# (JUPYTER_GATEWAY_RETRY_MAX env var) -# Default: 5 -# c.GatewayClient.gateway_retry_max = 5 - -## Additional HTTP headers to pass on the request. This value will be converted to a dict. -# (JUPYTER_GATEWAY_HEADERS env var) -# Default: '{}' -# c.GatewayClient.headers = '{}' - -## The password for HTTP authentication. (JUPYTER_GATEWAY_HTTP_PWD env var) -# Default: None -# c.GatewayClient.http_pwd = None - -## The username for HTTP authentication. 
(JUPYTER_GATEWAY_HTTP_USER env var) -# Default: None -# c.GatewayClient.http_user = None - -## The gateway API endpoint for accessing kernel resources -# (JUPYTER_GATEWAY_KERNELS_ENDPOINT env var) -# Default: '/api/kernels' -# c.GatewayClient.kernels_endpoint = '/api/kernels' - -## The gateway API endpoint for accessing kernelspecs -# (JUPYTER_GATEWAY_KERNELSPECS_ENDPOINT env var) -# Default: '/api/kernelspecs' -# c.GatewayClient.kernelspecs_endpoint = '/api/kernelspecs' - -## The gateway endpoint for accessing kernelspecs resources -# (JUPYTER_GATEWAY_KERNELSPECS_RESOURCE_ENDPOINT env var) -# Default: '/kernelspecs' -# c.GatewayClient.kernelspecs_resource_endpoint = '/kernelspecs' - -## The time allowed for HTTP request completion. (JUPYTER_GATEWAY_REQUEST_TIMEOUT -# env var) -# Default: 40.0 -# c.GatewayClient.request_timeout = 40.0 - -## The url of the Kernel or Enterprise Gateway server where -# kernel specifications are defined and kernel management takes place. -# If defined, this Notebook server acts as a proxy for all kernel -# management and kernel specification retrieval. (JUPYTER_GATEWAY_URL env var) -# Default: None -# c.GatewayClient.url = None - -## For HTTPS requests, determines if server's certificate should be validated or not. -# (JUPYTER_GATEWAY_VALIDATE_CERT env var) -# Default: True -# c.GatewayClient.validate_cert = True - -## The websocket url of the Kernel or Enterprise Gateway server. If not provided, this value -# will correspond to the value of the Gateway url with 'ws' in place of 'http'. (JUPYTER_GATEWAY_WS_URL env var) -# Default: None -# c.GatewayClient.ws_url = None - -#------------------------------------------------------------------------------ -# TerminalManager(LoggingConfigurable) configuration -#------------------------------------------------------------------------------ -## - -## Timeout (in seconds) in which a terminal has been inactive and ready to be culled. -# Values of 0 or lower disable culling. 
-# Default: 0 -# c.TerminalManager.cull_inactive_timeout = 0 - -## The interval (in seconds) on which to check for terminals exceeding the -# inactive timeout value. -# Default: 300 -# c.TerminalManager.cull_interval = 300 diff --git a/.jupyter/migrated b/.jupyter/migrated deleted file mode 100644 index 5161bba8..00000000 --- a/.jupyter/migrated +++ /dev/null @@ -1 +0,0 @@ -2022-12-09T11:41:56.383203 \ No newline at end of file diff --git a/Taskfile.yml b/Taskfile.yml index ace86856..ddd7445b 100644 --- a/Taskfile.yml +++ b/Taskfile.yml @@ -52,12 +52,9 @@ tasks: - task: check-pyright check-pyright: - desc: Execute pyright conditionally - currently does not work in 3.12 (https://github.com/ekalinin/nodeenv/issues/341) + desc: Execute pyright cmds: - - | - if [ "{{.PYVERSION}}" != "3.12" ]; then - ./scripts/builder.sh python3 -m pyright --stats archivist - fi + - ./scripts/builder.sh python3 -m pyright --stats archivist check-fixes: desc: Show proposed fixes from ruff diff --git a/archivist/appidp.py b/archivist/appidp.py index a2ba7467..e5cc5bbb 100644 --- a/archivist/appidp.py +++ b/archivist/appidp.py @@ -83,5 +83,6 @@ def token(self, client_id: str, client_secret: str) -> AppIDP: "client_secret": client_secret, }, data=True, + no_auth=True, ) ) diff --git a/archivist/archivist.py b/archivist/archivist.py index e6e25bed..36babfd5 100644 --- a/archivist/archivist.py +++ b/archivist/archivist.py @@ -33,10 +33,13 @@ from copy import deepcopy from logging import getLogger from time import time -from typing import Any, BinaryIO +from typing import TYPE_CHECKING, Any, BinaryIO from requests_toolbelt.multipart.encoder import MultipartEncoder +if TYPE_CHECKING: + from requests.models import Response + from .access_policies import _AccessPoliciesClient from .appidp import _AppIDPClient from .applications import _ApplicationsClient @@ -49,6 +52,9 @@ from .composite import _CompositeClient from .confirmer import MAX_TIME from .constants import ( + AUTHORIZATION_KEY, + 
BEARER_PREFIX, + BINARY_CONTENT, ROOT, SEP, ) @@ -224,26 +230,58 @@ def __copy__(self) -> "Archivist": arch._user_agent = self._user_agent return arch - def _add_headers(self, headers: "dict[str,str]|None") -> "dict[str,Any]": + def _add_headers( + self, headers: "dict[str,str]|None", no_auth: bool = False + ) -> "dict[str,Any]": newheaders = super()._add_headers(headers) - auth = self.auth # this may trigger a refetch so only do it once here - # for appidp endpoint there may not be an authtoken - if auth is not None: - newheaders["authorization"] = "Bearer " + auth.strip() + # there may not be an authtoken required + if not no_auth: + auth = self.auth # this may trigger a refetch so only do it once here + if auth is not None: + newheaders[AUTHORIZATION_KEY] = BEARER_PREFIX + " " + auth.strip() return newheaders # currently only the archivist endpoint is allowed to create/modify data. # this may change... @retry_429 + def __post( + self, + url: str, + request: "dict[str,Any] | bytes | None", + *, + headers: "dict[str,Any] | None" = None, + data: bool = False, + no_auth: bool = False, + ) -> "Response": + if data: + response = self.session.post( + url, + data=request, + headers=self._add_headers(headers, no_auth=no_auth), + ) + else: + response = self.session.post( + url, + json=request, + headers=self._add_headers(headers, no_auth=no_auth), + ) + + error = _parse_response(response) + if error is not None: + raise error + + return response + def post( self, url: str, - request: "dict[str,Any]|None", + request: "dict[str,Any] | None", *, - headers: "dict[str,Any]|None" = None, - data: "dict[str, Any] | bool" = False, + headers: "dict[str,Any] | None" = None, + data: bool = False, + no_auth: bool = False, ) -> "dict[str, Any]": """POST method (REST) @@ -254,27 +292,43 @@ def post( request (dict): request body defining the entity headers (dict): optional REST headers data (bool): send as form-encoded and not as json + no_auth (bool): strip authorization from headers 
Returns: dict representing the response body (entity). """ - if data: - response = self.session.post( - url, - data=request, - ) - else: - response = self.session.post( - url, - json=request, - headers=self._add_headers(headers), - ) + response = self.__post( + url, request, headers=headers, data=data, no_auth=no_auth + ) + return response.json() - error = _parse_response(response) - if error is not None: - raise error + def post_binary( + self, + url: str, + request: bytes, + *, + headers: "dict[str,Any] | None" = None, + no_auth: bool = False, + ) -> bytes: + """POST method - return response.json() + Creates an entity + + Args: + url (str): e.g. v1/publicscitt/entries + request (bytes): binary input data + headers (dict): optional REST headers + no_auth (bool): strip authorization from headers + + Returns: + bytes representing the response data. + """ + newheaders = {**headers} if headers is not None else {} + newheaders["content-type"] = BINARY_CONTENT + response = self.__post( + url, request, headers=newheaders, data=True, no_auth=no_auth + ) + return response.content @retry_429 def post_file( diff --git a/archivist/archivistpublic.py b/archivist/archivistpublic.py index 50a4db0f..b74b5179 100644 --- a/archivist/archivistpublic.py +++ b/archivist/archivistpublic.py @@ -209,9 +209,30 @@ def _add_headers(self, headers: "dict[str, str]|None") -> "dict[str, str]": return newheaders # the public endpoint is currently readonly so only read-type methods are - # defined here. This may change - the Public endpoint may allow writes - # in future... + # defined here. 
@retry_429 + def __get( + self, + url: str, + *, + headers: "dict[str, str]|None" = None, + params: "dict[str, Any]|None" = None, + ) -> "Response": + + response = self.session.get( + url, + headers=self._add_headers(headers), + params=_dotdict(params), + ) + + self._response_ring_buffer.appendleft(response) + + error = _parse_response(response) + if error is not None: + raise error + + return response + def get( self, url: str, @@ -230,19 +251,29 @@ def get( dict representing the response body (entity). """ - response = self.session.get( - url, - headers=self._add_headers(headers), - params=_dotdict(params), - ) + response = self.__get(url, headers=headers, params=params) + return response.json() - self._response_ring_buffer.appendleft(response) + def get_binary( + self, + url: str, + *, + headers: "dict[str, str]|None" = None, + params: "dict[str, Any]|None" = None, + ) -> bytes: + """GET method - error = _parse_response(response) - if error is not None: - raise error + Args: + url (str): e.g. https://app.datatrails.ai/archivist/v2/publicassets/xxxxxxxxxxxxxxxxxx + headers (dict): optional REST headers + params (dict): optional params strings - return response.json() + Returns: + bytes representing the response content. 
+ + """ + response = self.__get(url, headers=headers, params=params) + return response.content @retry_429 def get_file( diff --git a/archivist/constants.py b/archivist/constants.py index 065dc16c..2a7a4418 100644 --- a/archivist/constants.py +++ b/archivist/constants.py @@ -17,6 +17,12 @@ USER_AGENT_PREFIX = "pysdk/" PARTNER_ID = "DataTrails-Partner-ID" +# bearer token +AUTHORIZATION_KEY = "authorization" +BEARER_PREFIX = "Bearer" + +BINARY_CONTENT = "application/octet-stream" + # define in MIME canonical form HEADERS_REQUEST_TOTAL_COUNT = "X-Request-Total-Count" HEADERS_TOTAL_COUNT = "X-Total-Count" diff --git a/archivist/notebooks/Initialization and Credentials.ipynb b/archivist/notebooks/Initialization and Credentials.ipynb index a7159419..10b787dc 100644 --- a/archivist/notebooks/Initialization and Credentials.ipynb +++ b/archivist/notebooks/Initialization and Credentials.ipynb @@ -10,46 +10,45 @@ }, { "cell_type": "code", - "execution_count": 1, - "id": "956bfa08-d9cf-4e0a-9221-05a8b03da7c8", + "execution_count": 6, + "id": "08eb7c4b-d5fb-4822-8663-22a88d3a0420", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Requirement already satisfied: datatrails-archivist in /home/paul/.config/jupyterlab-desktop/jlab_server/lib/python3.8/site-packages (0.18.1)\n", - "Requirement already satisfied: requests~=2.28 in /home/paul/.config/jupyterlab-desktop/jlab_server/lib/python3.8/site-packages (from datatrails-archivist) (2.28.1)\n", - "Requirement already satisfied: pyaml-env~=1.1 in /home/paul/.config/jupyterlab-desktop/jlab_server/lib/python3.8/site-packages (from datatrails-archivist) (1.2.1)\n", - "Requirement already satisfied: requests-toolbelt~=0.9 in /home/paul/.config/jupyterlab-desktop/jlab_server/lib/python3.8/site-packages (from datatrails-archivist) (0.10.1)\n", - "Requirement already satisfied: backoff~=1.11 in /home/paul/.config/jupyterlab-desktop/jlab_server/lib/python3.8/site-packages (from datatrails-archivist) 
(1.11.1)\n", - "Requirement already satisfied: certifi in /home/paul/.config/jupyterlab-desktop/jlab_server/lib/python3.8/site-packages (from datatrails-archivist) (2022.12.7)\n", - "Requirement already satisfied: rfc3339~=6.2 in /home/paul/.config/jupyterlab-desktop/jlab_server/lib/python3.8/site-packages (from datatrails-archivist) (6.2)\n", - "Requirement already satisfied: xmltodict~=0.13 in /home/paul/.config/jupyterlab-desktop/jlab_server/lib/python3.8/site-packages (from datatrails-archivist) (0.13.0)\n", - "Requirement already satisfied: iso8601~=1.0 in /home/paul/.config/jupyterlab-desktop/jlab_server/lib/python3.8/site-packages (from datatrails-archivist) (1.1.0)\n", - "Requirement already satisfied: flatten-dict~=0.4 in /home/paul/.config/jupyterlab-desktop/jlab_server/lib/python3.8/site-packages (from datatrails-archivist) (0.4.2)\n", - "Requirement already satisfied: Jinja2~=3.0 in /home/paul/.config/jupyterlab-desktop/jlab_server/lib/python3.8/site-packages (from datatrails-archivist) (3.1.2)\n", - "Requirement already satisfied: six<2.0,>=1.12 in /home/paul/.config/jupyterlab-desktop/jlab_server/lib/python3.8/site-packages (from flatten-dict~=0.4->datatrails-archivist) (1.16.0)\n", - "Requirement already satisfied: MarkupSafe>=2.0 in /home/paul/.config/jupyterlab-desktop/jlab_server/lib/python3.8/site-packages (from Jinja2~=3.0->datatrails-archivist) (2.1.1)\n", - "Requirement already satisfied: PyYAML<=7.0,>=5.0 in /home/paul/.config/jupyterlab-desktop/jlab_server/lib/python3.8/site-packages (from pyaml-env~=1.1->datatrails-archivist) (6.0)\n", - "Requirement already satisfied: charset-normalizer<3,>=2 in /home/paul/.config/jupyterlab-desktop/jlab_server/lib/python3.8/site-packages (from requests~=2.28->datatrails-archivist) (2.1.1)\n", - "Requirement already satisfied: idna<4,>=2.5 in /home/paul/.config/jupyterlab-desktop/jlab_server/lib/python3.8/site-packages (from requests~=2.28->datatrails-archivist) (3.4)\n", - "Requirement already satisfied: 
urllib3<1.27,>=1.21.1 in /home/paul/.config/jupyterlab-desktop/jlab_server/lib/python3.8/site-packages (from requests~=2.28->datatrails-archivist) (1.26.13)\n", - "Note: you may need to restart the kernel to use updated packages.\n", - "Requirement already satisfied: python-dotenv in /home/paul/.config/jupyterlab-desktop/jlab_server/lib/python3.8/site-packages (0.21.0)\n", - "Note: you may need to restart the kernel to use updated packages.\n" + "Requirement already satisfied: datatrails-archivist in /home/paul/git/datatrails-python/datatrails-venv/lib/python3.10/site-packages (0.33.2.post1+git.8e1ac7c7.dirty)\n", + "Requirement already satisfied: requests-toolbelt~=1.0 in /home/paul/git/datatrails-python/datatrails-venv/lib/python3.10/site-packages (from datatrails-archivist) (1.0.0)\n", + "Requirement already satisfied: certifi in /home/paul/git/datatrails-python/datatrails-venv/lib/python3.10/site-packages (from datatrails-archivist) (2024.8.30)\n", + "Requirement already satisfied: Jinja2~=3.1 in /home/paul/git/datatrails-python/datatrails-venv/lib/python3.10/site-packages (from datatrails-archivist) (3.1.4)\n", + "Requirement already satisfied: pyaml-env~=1.2 in /home/paul/git/datatrails-python/datatrails-venv/lib/python3.10/site-packages (from datatrails-archivist) (1.2.1)\n", + "Requirement already satisfied: backoff~=2.2.1 in /home/paul/git/datatrails-python/datatrails-venv/lib/python3.10/site-packages (from datatrails-archivist) (2.2.1)\n", + "Requirement already satisfied: flatten-dict~=0.4 in /home/paul/git/datatrails-python/datatrails-venv/lib/python3.10/site-packages (from datatrails-archivist) (0.4.2)\n", + "Requirement already satisfied: rfc3339~=6.2 in /home/paul/git/datatrails-python/datatrails-venv/lib/python3.10/site-packages (from datatrails-archivist) (6.2)\n", + "Requirement already satisfied: requests~=2.32 in /home/paul/git/datatrails-python/datatrails-venv/lib/python3.10/site-packages (from datatrails-archivist) (2.32.3)\n", + "Requirement 
already satisfied: iso8601~=2.1 in /home/paul/git/datatrails-python/datatrails-venv/lib/python3.10/site-packages (from datatrails-archivist) (2.1.0)\n",
+    "Requirement already satisfied: xmltodict~=0.14 in /home/paul/git/datatrails-python/datatrails-venv/lib/python3.10/site-packages (from datatrails-archivist) (0.14.2)\n",
+    "Requirement already satisfied: six<2.0,>=1.12 in /home/paul/git/datatrails-python/datatrails-venv/lib/python3.10/site-packages (from flatten-dict~=0.4->datatrails-archivist) (1.16.0)\n",
+    "Requirement already satisfied: MarkupSafe>=2.0 in /home/paul/git/datatrails-python/datatrails-venv/lib/python3.10/site-packages (from Jinja2~=3.1->datatrails-archivist) (3.0.2)\n",
+    "Requirement already satisfied: PyYAML<=7.0,>=5.0 in /home/paul/git/datatrails-python/datatrails-venv/lib/python3.10/site-packages (from pyaml-env~=1.2->datatrails-archivist) (6.0.2)\n",
+    "Requirement already satisfied: idna<4,>=2.5 in /home/paul/git/datatrails-python/datatrails-venv/lib/python3.10/site-packages (from requests~=2.32->datatrails-archivist) (3.10)\n",
+    "Requirement already satisfied: urllib3<3,>=1.21.1 in /home/paul/git/datatrails-python/datatrails-venv/lib/python3.10/site-packages (from requests~=2.32->datatrails-archivist) (2.2.3)\n",
+    "Requirement already satisfied: charset-normalizer<4,>=2 in /home/paul/git/datatrails-python/datatrails-venv/lib/python3.10/site-packages (from requests~=2.32->datatrails-archivist) (3.4.0)\n",
+    "/bin/bash: -c: line 1: syntax error near unexpected token `-m'\n",
+    "/bin/bash: -c: line 1: `(sys.executable) -m pip install --upgrade python-dotenv'\n"
    ]
   }
  ],
  "source": [
   "# Install the datatrails-python package\n",
-   "%python3 -m pip install --upgrade datatrails-archivist\n",
-   "%python3 -m pip install --upgrade python-dotenv"
+   "!{sys.executable} -m pip install --upgrade datatrails-archivist\n",
+   "!{sys.executable} -m pip install --upgrade python-dotenv"
  ]
 },
 {
  "cell_type": "code",
- "execution_count": 2,
+ "execution_count": 7,
"id": "5bb82001-bcab-497e-85dc-5809621169be", "metadata": {}, "outputs": [ @@ -57,29 +56,31 @@ "name": "stdout", "output_type": "stream", "text": [ - "DATATRAILS_URL=\"https://app.datatrails.ai\"\n", - "DATATRAILS_APPREG_CLIENT=\"437bd138-dade-4346-aadd-dfdfee51ddf4\"\n", - "DATATRAILS_APPREG_SECRET=\"d26a00cafd9c550228ab3dc9c2303f4c0f79fe9fd00d80fdc92ff2844cdc283e\"\n", + "DATATRAILS_URL=\"https://app.dev-paul-0.dev.datatrails.ai\"\n", + "DATATRAILS_APPREG_CLIENT=\"c0232b72-aa68-4596-a9a4-b3f5d3e55255\"\n", + "DATATRAILS_APPREG_SECRET=\"5854b9f1b0559db5daf6f57a53e80ac31ac4b14f88dfa679be59d1569acade20\"\n", "DATATRAILS_ARTIST_ATTACHMENT=\"test_files/pexels-andrea-turner-707697.jpeg\"\n", - "DATATRAILS_UNIQUE_ID=\"666769323\"\n", + "DATATRAILS_UNIQUE_ID=\"558232363\"\n", "\n" ] } ], "source": [ + "from os import environ\n", "from random import randint\n", "\n", "unique_id = randint(1, 1_000_000_000)\n", "\n", + "datatrails_url = environ.get(\"DATATRAILS_URL\", \"https//app.datatrails.ai\")\n", + "datatrails_appreg_client = environ[\"DATATRAILS_APPREG_CLIENT\"]\n", + "datatrails_appreg_secret = environ[\"DATATRAILS_APPREG_SECRET\"]\n", + "datatrails_artist_attachment = environ[\"DATATRAILS_ARTIST_ATTACHMENT\"]\n", + "\n", "with open(\"notebooks.env\", \"w\", encoding=\"utf-8\") as fd:\n", - " fd.write('DATATRAILS_URL=\"https://app.datatrails.ai\"\\n')\n", - " fd.write('DATATRAILS_APPREG_CLIENT=\"437bd138-dade-4346-aadd-dfdfee51ddf4\"\\n')\n", - " fd.write(\n", - " 'DATATRAILS_APPREG_SECRET=\"d26a00cafd9c550228ab3dc9c2303f4c0f79fe9fd00d80fdc92ff2844cdc283e\"\\n'\n", - " )\n", - " fd.write(\n", - " 'DATATRAILS_ARTIST_ATTACHMENT=\"test_files/pexels-andrea-turner-707697.jpeg\"\\n'\n", - " )\n", + " fd.write(f'DATATRAILS_URL=\"{datatrails_url}\"\\n')\n", + " fd.write(f'DATATRAILS_APPREG_CLIENT=\"{datatrails_appreg_client}\"\\n')\n", + " fd.write(f'DATATRAILS_APPREG_SECRET=\"{datatrails_appreg_secret}\"\\n')\n", + " 
fd.write(f'DATATRAILS_ARTIST_ATTACHMENT=\"{datatrails_artist_attachment}\"\\n')\n", " fd.write(f'DATATRAILS_UNIQUE_ID=\"{unique_id}\"\\n')\n", "\n", "with open(\"notebooks.env\", \"r\") as fd:\n", @@ -111,7 +112,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.15" + "version": "3.10.12" } }, "nbformat": 4, diff --git a/archivist/utils.py b/archivist/utils.py index 083b92ce..8eec7920 100644 --- a/archivist/utils.py +++ b/archivist/utils.py @@ -60,6 +60,7 @@ def get_auth( auth_token_filename=None, auth_token=None, client_id=None, + client_filename=None, client_secret_filename=None, client_secret=None, ): # pragma: no cover @@ -76,6 +77,10 @@ def get_auth( return auth_token + if client_id is None and client_filename is not None: + with open(client_filename, mode="r", encoding="utf-8") as tokenfile: + client_id = tokenfile.read().strip() + if client_id is not None: if client_secret_filename is not None: with open(client_secret_filename, mode="r", encoding="utf-8") as tokenfile: diff --git a/docs/notebooks/requirements.txt b/docs/notebooks/requirements.txt index 61f1c130..ca1d72f8 100644 --- a/docs/notebooks/requirements.txt +++ b/docs/notebooks/requirements.txt @@ -1,12 +1,7 @@ # # jupyter notebooks # -ipython~=8.13.0; python_version < '3.10' -ipython~=8.20; python_version >= '3.10' -notebook~=7.2 -jupyter~=1.0 +jupyter~=1.1.1 jupyter-console~=6.6 -jupyter-contrib-nbextensions~=0.7 -jupyter-nbextensions-configurator~=0.6 python-dotenv[cli]~=1.0 diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index 941eebda..fd6bb19e 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -35,6 +35,7 @@ merkle mimetype onwards params +publicscitt publicurl py sbom diff --git a/functests/execcompliance_policies.py b/functests/execcompliance_policies.py index cf9befd8..2137eac4 100644 --- a/functests/execcompliance_policies.py +++ b/functests/execcompliance_policies.py @@ -97,10 +97,7 @@ class 
TestCompliancePoliciesBase(TestCase): def setUp(self): auth = get_auth( - auth_token=getenv("DATATRAILS_AUTHTOKEN"), - auth_token_filename=getenv("DATATRAILS_AUTHTOKEN_FILENAME"), - client_id=getenv("DATATRAILS_APPREG_CLIENT"), - client_secret=getenv("DATATRAILS_APPREG_SECRET"), + client_filename=getenv("DATATRAILS_APPREG_CLIENT_FILENAME"), client_secret_filename=getenv("DATATRAILS_APPREG_SECRET_FILENAME"), ) self.arch = Archivist( diff --git a/functests/execnotebooks.py b/functests/execnotebooks.py index 434d7660..3a58765d 100644 --- a/functests/execnotebooks.py +++ b/functests/execnotebooks.py @@ -3,8 +3,7 @@ """ from os import getenv -from sys import version_info -from unittest import skip, skipIf +from unittest import skip from testbook import testbook @@ -26,10 +25,6 @@ LOGGER = logger.LOGGER -@skipIf( - version_info >= (3, 12), - "cannot run test as notebooks unsupported in 3.12", -) class TestNotebooks(TestCase): """ Test Archivist Notebooks diff --git a/scripts/notebooks.sh b/scripts/notebooks.sh index 97016934..face0d1b 100755 --- a/scripts/notebooks.sh +++ b/scripts/notebooks.sh @@ -17,17 +17,53 @@ then exit 1 fi -NOTEBOOKDIR=datatrails-venv/notebooks +set -x +if [ -z "${DATATRAILS_URL}" ] +then + export DATATRAILS_URL="https://app.datatrails.ai" +fi +if [ -n "${DATATRAILS_APPREG_CLIENT_FILENAME}" ] +then + if [ -s "${DATATRAILS_APPREG_CLIENT_FILENAME}" ] + then + export DATATRAILS_APPREG_CLIENT=$(cat ${DATATRAILS_APPREG_CLIENT_FILENAME}) + fi +fi +if [ -z "${DATATRAILS_APPREG_CLIENT}" ] +then + echo "DATATRAILS_APPREG_CLIENT is not set" + exit 1 +fi +if [ -n "${DATATRAILS_APPREG_SECRET_FILENAME}" ] +then + if [ -s "${DATATRAILS_APPREG_SECRET_FILENAME}" ] + then + export DATATRAILS_APPREG_SECRET=$(cat ${DATATRAILS_APPREG_SECRET_FILENAME}) + fi +fi +if [ -z "${DATATRAILS_APPREG_SECRET}" ] +then + echo "DATATRAILS_APPREG_SECRET is not set" + exit 1 +fi + +NOTEBOOKDIR=$(pwd)/datatrails-venv/notebooks export 
DATATRAILS_ARTIST_ATTACHMENT="test_files/pexels-andrea-turner-707697.jpeg" export DATATRAILS_UNIQUE_ID=${SRANDOM} source datatrails-venv/bin/activate +trap deactivate EXIT + mkdir -p "${NOTEBOOKDIR}" # The customer will download the notebooks from python.datatrails.ai but # we will copy locally -cp archivist/notebooks/*.ipynb "${NOTEBOOKDIR}"/ -cp -r archivist/notebooks/test_files "${NOTEBOOKDIR}"/ +DIR=$(pwd) +cd archivist/notebooks +jupyter trust *.ipynb +cp *.ipynb "${NOTEBOOKDIR}"/ +cp -r test_files "${NOTEBOOKDIR}"/ +cd $DIR +jupyter notebook --help jupyter notebook --ip 0.0.0.0 --notebook-dir="${NOTEBOOKDIR}" -deactivate diff --git a/scripts/venv.sh b/scripts/venv.sh index c22a3924..61e19738 100755 --- a/scripts/venv.sh +++ b/scripts/venv.sh @@ -19,7 +19,7 @@ fi rm -rf datatrails-venv python3 -m venv datatrails-venv source datatrails-venv/bin/activate +trap deactivate EXIT python3 -m pip install --force-reinstall wheel python3 -m pip install --force-reinstall dist/datatrails_archivist-*.whl python3 -m pip install --force-reinstall -r docs/notebooks/requirements.txt -deactivate diff --git a/unittests/mock_response.py b/unittests/mock_response.py index c1226da4..157e38b2 100644 --- a/unittests/mock_response.py +++ b/unittests/mock_response.py @@ -9,18 +9,29 @@ class MockResponse(dict): def __init__( - self, status_code, request=None, headers=None, iter_content=None, **kwargs + self, + status_code, + request=None, + headers=None, + content=None, + iter_content=None, + **kwargs, ): super().__init__(**kwargs) self.status_code = status_code self._headers = headers self._request = request + self._content = content self._iter_content = iter_content @property def url(self): return "url" + @property + def content(self): + return self._content + @property def request(self): return self._request diff --git a/unittests/testappidp.py b/unittests/testappidp.py index fd8dc62a..61c564aa 100644 --- a/unittests/testappidp.py +++ b/unittests/testappidp.py @@ -4,12 +4,15 @@ from 
unittest import TestCase, mock +from archivist.about import __version__ as VERSION from archivist.archivist import Archivist from archivist.constants import ( APPIDP_LABEL, APPIDP_SUBPATH, APPIDP_TOKEN, ROOT, + USER_AGENT, + USER_AGENT_PREFIX, ) from .mock_response import MockResponse @@ -78,6 +81,9 @@ def test_appidp_token_create(self): self.assertEqual( kwargs, { + "headers": { + USER_AGENT: f"{USER_AGENT_PREFIX}{VERSION}", + }, "data": REQUEST, }, msg="CREATE method kwargs called incorrectly", diff --git a/unittests/testarchivistget.py b/unittests/testarchivistget.py index c0993ec7..81844c70 100644 --- a/unittests/testarchivistget.py +++ b/unittests/testarchivistget.py @@ -63,6 +63,35 @@ def test_get(self): msg="GET method called incorrectly", ) + def test_get_binary(self): + """ + Test default get_binary method + """ + content = bytearray() + content.extend(b"response") + with mock.patch.object(self.arch.session, "get") as mock_get: + mock_get.return_value = MockResponse(200, content=content) + result = self.arch.get_binary("path/path/entity/xxxxxxxx") + self.assertEqual( + tuple(mock_get.call_args), + ( + ("path/path/entity/xxxxxxxx",), + { + "headers": { + "authorization": "Bearer authauthauth", + USER_AGENT: f"{USER_AGENT_PREFIX}{VERSION}", + }, + "params": None, + }, + ), + msg="GET method called incorrectly", + ) + self.assertEqual( + result, + content, + msg="GET result is incorrect", + ) + def test_ring_buffer(self): """ Test That the ring buffer for response objects works as expected diff --git a/unittests/testarchivistpost.py b/unittests/testarchivistpost.py index a78a14b4..e7d99758 100644 --- a/unittests/testarchivistpost.py +++ b/unittests/testarchivistpost.py @@ -8,6 +8,7 @@ from archivist.about import __version__ as VERSION from archivist.archivist import Archivist from archivist.constants import ( + BINARY_CONTENT, HEADERS_RETRY_AFTER, USER_AGENT, USER_AGENT_PREFIX, @@ -67,6 +68,41 @@ def test_post(self): msg="POST method kwargs called 
incorrectly", ) + def test_post_binary(self): + """ + Test default post method + """ + request = bytearray() + request.extend(b"request") + content = bytearray() + content.extend(b"response") + with mock.patch.object(self.arch.session, "post") as mock_post: + mock_post.return_value = MockResponse(200, content=content) + result = self.arch.post_binary("path/path", request) + args, kwargs = mock_post.call_args + self.assertEqual( + args, + ("path/path",), + msg="POST method args called incorrectly", + ) + self.assertEqual( + kwargs, + { + "data": request, + "headers": { + "authorization": "Bearer authauthauth", + USER_AGENT: f"{USER_AGENT_PREFIX}{VERSION}", + "content-type": BINARY_CONTENT, + }, + }, + msg="POST method kwargs called incorrectly", + ) + self.assertEqual( + result, + content, + msg="POST result is incorrect", + ) + def test_post_with_error(self): """ Test post method with error