diff --git a/src/ducktools/env/_lazy_imports.py b/src/ducktools/env/_lazy_imports.py index 3b3e52c..6b22deb 100644 --- a/src/ducktools/env/_lazy_imports.py +++ b/src/ducktools/env/_lazy_imports.py @@ -37,6 +37,7 @@ ModuleImport("json"), ModuleImport("re"), ModuleImport("shutil"), + ModuleImport("sqlite3", asname="sql"), ModuleImport("subprocess"), ModuleImport("tempfile"), ModuleImport("warnings"), diff --git a/src/ducktools/env/_sqlclasses.py b/src/ducktools/env/_sqlclasses.py new file mode 100644 index 0000000..6048b23 --- /dev/null +++ b/src/ducktools/env/_sqlclasses.py @@ -0,0 +1,352 @@ +# ducktools.env +# MIT License +# +# Copyright (c) 2024 David C Ellis +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +# This is a minimal object/database wrapper for ducktools.classbuilder +# Execute the class to see examples of the methods that will be generated + +import itertools + +from ducktools.classbuilder import ( + SlotMakerMeta, + builder, + make_unified_gatherer, +) + +from ducktools.classbuilder.prefab import ( + PREFAB_FIELDS, + Attribute, + as_dict, + eq_maker, + get_attributes, + init_maker, + repr_maker, +) + + +TYPE_MAP = { + None: "NULL", + int: "INTEGER", + bool: "INTEGER", + float: "REAL", + str: "TEXT", + str | None: "TEXT", + bytes: "BLOB", + list[str]: "TEXT" # lists of strings are converted to delimited strings +} + +MAPPED_TYPES = None | int | bool | float | str | bytes | list[str] + + +class SQLAttribute(Attribute): + """ + A Special attribute for SQL tables + + :param unique: Should this field be unique in the table + :param internal: Should this field be excluded from the table + """ + primary_key: bool = False + unique: bool = False + internal: bool = False + computed: str | None = None + + def validate_field(self): + super().validate_field() + if self.primary_key and self.unique: + raise AttributeError("Primary key fields are already unique") + + +def get_sql_fields(cls: "SQLMeta") -> dict[str, SQLAttribute]: + return get_attributes(cls) # noqa + + +unified_gatherer = make_unified_gatherer(SQLAttribute) + + +def flatten_list(strings: list[str], *, delimiter=";") -> str: + return delimiter.join(strings) + + +def separate_list(string: str, *, delimiter=";") -> list[str]: + return string.split(delimiter) if string else [] + + +def caps_to_snake(name: str): + letters = [name[0].lower()] + for previous, current in itertools.pairwise(name): + if current.isupper() and not previous.isupper(): + letters.append("_") + letters.append(current.lower()) + return 
"".join(letters) + + +class SQLMeta(SlotMakerMeta): + TABLE_NAME: str + VALID_FIELDS: dict[str, SQLAttribute] + COMPUTED_FIELDS: set[str] + PRIMARY_KEY: str + STR_LIST_COLUMNS: set[str] + BOOL_COLUMNS: set[str] + + +default_methods = frozenset({init_maker, repr_maker, eq_maker}) + + +class SQLClass(metaclass=SQLMeta): + _meta_gatherer = unified_gatherer + __slots__ = {} + + def __init_subclass__( + cls, + *, + methods=default_methods, + gatherer=unified_gatherer, + **kwargs, + ): + slots = "__slots__" in cls.__dict__ + builder(cls, gatherer=gatherer, methods=methods, flags={"slotted": slots, "kw_only": True}) + + fields = get_sql_fields(cls) + valid_fields = {} + split_columns = set() + bools = set() + computed_fields = set() + + for name, value in fields.items(): + if value.computed: + computed_fields.add(name) + if not value.internal: + valid_fields[name] = value + + if value.type == list[str]: + split_columns.add(name) + elif value.type is bool: + bools.add(name) + + cls.VALID_FIELDS = valid_fields + cls.COMPUTED_FIELDS = computed_fields + cls.STR_LIST_COLUMNS = split_columns + cls.BOOL_COLUMNS = bools + + setattr(cls, PREFAB_FIELDS, list(fields.keys())) + + primary_key = None + for name, field in fields.items(): + if field.primary_key: + primary_key = name + break + + if primary_key is None: + raise AttributeError("sqlclass *must* have one primary key") + + if sum(1 for f in fields.values() if f.primary_key) > 1: + raise AttributeError("sqlclass *must* have **only** one primary key") + + cls.PRIMARY_KEY = primary_key + cls.TABLE_NAME = caps_to_snake(cls.__name__) + + super().__init_subclass__(**kwargs) + + @classmethod + def create_table(cls, con): + sql_field_list = [] + + for name, field in cls.VALID_FIELDS.items(): + field_type = TYPE_MAP[field.type] + if field.primary_key: + constraint = " PRIMARY KEY" + elif field.unique: + constraint = " UNIQUE" + else: + constraint = "" + + if field.computed: + field_str = f"{name} {field_type}{constraint} GENERATED ALWAYS AS ({field.computed})" + else: + field_str = f"{name} {field_type}{constraint}" + + sql_field_list.append(field_str) + + field_info = ", ".join(sql_field_list) + sql_command = f"CREATE TABLE IF NOT EXISTS {cls.TABLE_NAME}({field_info})" + + con.execute(sql_command) + + @classmethod + def drop_table(cls, con): + con.execute(f"DROP TABLE IF EXISTS {cls.TABLE_NAME}") + + @classmethod + def row_factory(cls, cursor, row): + fields = [column[0] for column in cursor.description] + kwargs = {} + for key, value in zip(fields, row, strict=True): + if key in cls.STR_LIST_COLUMNS: + kwargs[key] = separate_list(value) + elif key in cls.BOOL_COLUMNS: + kwargs[key] = bool(value) + else: + kwargs[key] = value + + return cls(**kwargs) # noqa + + @classmethod + def _select_query(cls, cursor, filters: dict[str, MAPPED_TYPES] | None = None): + filters = {} if filters is None else filters + + if filters: + keyfilter = [] + for key in filters.keys(): + if key not in cls.VALID_FIELDS: + raise KeyError(f"{key} is not a valid column for table {cls.TABLE_NAME}") + + keyfilter.append(f"{key} = :{key}") + + filter_str = ", ".join(keyfilter) + search_condition = f" WHERE {filter_str}" + else: + search_condition = "" + + cursor.row_factory = cls.row_factory + result = cursor.execute(f"SELECT * FROM {cls.TABLE_NAME} {search_condition}", filters) + return result + + @classmethod + def select_rows(cls, con, filters: dict[str, MAPPED_TYPES] | None = None): + cursor = con.cursor() + try: + result = cls._select_query(cursor, filters=filters) + rows = 
result.fetchall() + finally: + cursor.close() + + return rows + + @classmethod + def select_row(cls, con, filters: dict[str, MAPPED_TYPES] | None = None): + cursor = con.cursor() + try: + result = cls._select_query(cursor, filters=filters) + row = result.fetchone() + finally: + cursor.close() + + return row + + @classmethod + def select_like(cls, con, filters: dict[str, MAPPED_TYPES] | None = None): + filters = {} if filters is None else filters + + if filters: + keyfilter = [] + for key in filters.keys(): + if key not in cls.VALID_FIELDS: + raise KeyError(f"{key} is not a valid column for table {cls.TABLE_NAME}") + + keyfilter.append(f"{key} LIKE :{key}") + + filter_str = ", ".join(keyfilter) + search_condition = f" WHERE {filter_str}" + else: + search_condition = "" + + cursor = con.cursor() + try: + cursor.row_factory = cls.row_factory + result = cursor.execute( + f"SELECT * FROM {cls.TABLE_NAME} {search_condition}", + filters + ) + rows = result.fetchall() + finally: + cursor.close() + + return rows + + @classmethod + def max_pk(cls, con): + statement = f"SELECT MAX({cls.PRIMARY_KEY}) from {cls.TABLE_NAME}" + result = con.execute(statement) + return result.fetchone()[0] + + @classmethod + def row_from_pk(cls, con, pk_value): + return cls.select_row(con, filters={cls.PRIMARY_KEY: pk_value}) + + def insert_row(self, con): + columns = ", ".join( + f":{name}" + for name in self.VALID_FIELDS.keys() + if name not in self.COMPUTED_FIELDS + ) + sql_statement = f"INSERT INTO {self.TABLE_NAME} VALUES({columns})" + + processed_values = { + name: flatten_list(value) if isinstance(value, list) else value + for name, value in as_dict(self).items() + if name in self.VALID_FIELDS and name not in self.COMPUTED_FIELDS + } + + with con: + result = con.execute(sql_statement, processed_values) + + if getattr(self, self.PRIMARY_KEY) is None: + setattr(self, self.PRIMARY_KEY, result.lastrowid) + + if self.COMPUTED_FIELDS: + row = self.row_from_pk(con, result.lastrowid) + for field in self.COMPUTED_FIELDS: + setattr(self, field, getattr(row, field)) + + def update_row(self, con, columns: list[str]): + if self.PRIMARY_KEY is None: + raise AttributeError("Primary key has not yet been set") + + if invalid_columns := (set(columns) - self.VALID_FIELDS.keys()): + raise ValueError(f"Invalid fields: {invalid_columns}") + + processed_values = { + name: flatten_list(value) if isinstance(value, list) else value + for name, value in as_dict(self).items() + if name in self.VALID_FIELDS and name not in self.COMPUTED_FIELDS + } + + set_columns = ", ".join(f"{name} = :{name}" for name in columns) + search_condition = f"{self.PRIMARY_KEY} = :{self.PRIMARY_KEY}" + + with con: + con.execute( + f"UPDATE {self.TABLE_NAME} SET {set_columns} WHERE {search_condition}", + processed_values, + ) + + def delete_row(self, con): + if self.PRIMARY_KEY is None: + raise AttributeError("Primary key has not yet been set") + + pk_filter = {self.PRIMARY_KEY: getattr(self, self.PRIMARY_KEY)} + + with con: + con.execute( + f"DELETE FROM {self.TABLE_NAME} WHERE {self.PRIMARY_KEY} = :{self.PRIMARY_KEY}", + pk_filter, + ) diff --git a/src/ducktools/env/bootstrapping/bootstrap.py b/src/ducktools/env/bootstrapping/bootstrap.py index 1da0eab..d7b77f7 100644 --- a/src/ducktools/env/bootstrapping/bootstrap.py +++ b/src/ducktools/env/bootstrapping/bootstrap.py @@ -51,9 +51,14 @@ def is_outdated(installed_version: str | None, bundled_version: str) -> bool: if installed_version is None: return True - # Always consider dev versions outdated + # Installed 
pre-releases should be replaced by equal version pre-releases + # But not older releases if "dev" in installed_version: - return True + from _vendor.packaging.version import Version # type: ignore + installed_info = Version(installed_version) + bundled_info = Version(bundled_version) + + return installed_info.release <= bundled_info.release # Shortcut for identical version string if installed_version == bundled_version: diff --git a/src/ducktools/env/catalogue.py b/src/ducktools/env/catalogue.py index 8ef0623..9503ee7 100644 --- a/src/ducktools/env/catalogue.py +++ b/src/ducktools/env/catalogue.py @@ -1,18 +1,18 @@ # ducktools.env # MIT License -# +# # Copyright (c) 2024 David C Ellis -# +# # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: -# +# # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. -# +# # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE @@ -26,9 +26,10 @@ import os.path from datetime import datetime as _datetime, timedelta as _timedelta -from ducktools.classbuilder.prefab import Prefab, prefab, attribute, as_dict, get_attributes +from ducktools.classbuilder.prefab import prefab, attribute -from .exceptions import PythonVersionNotFound, InvalidEnvironmentSpec, VenvBuildError, ApplicationError +from ._sqlclasses import SQLAttribute, SQLClass +from .exceptions import InvalidEnvironmentSpec, VenvBuildError, ApplicationError from .environment_specs import EnvironmentSpec from .config import Config from ._logger import log @@ -45,17 +46,38 @@ def _datetime_now_iso() -> str: return _datetime.now().isoformat() -class BaseEnv(Prefab, kw_only=True): - name: str +class SQLContext: + def __init__(self, db): + self.db = db + self.connection = None + + def __enter__(self): + self.connection = _laz.sql.connect(self.db) + return self.connection + + def __exit__(self, exc_type, exc_val, exc_tb): + if self.connection is not None: + self.connection.close() + self.connection = None + + +class BaseEnvironment(SQLClass): + row_id: int = SQLAttribute(default=None, primary_key=True) + name: str = SQLAttribute(unique=True) path: str python_version: str parent_python: str - created_on: str = attribute(default_factory=_datetime_now_iso) - last_used: str = attribute(default_factory=_datetime_now_iso) + created_on: str = SQLAttribute(default_factory=_datetime_now_iso) + last_used: str = SQLAttribute(default_factory=_datetime_now_iso, compare=False) + + # This field is used to indicate that the venv is usable in case another process + # Attempts to run a script from the venv before it has finished construction + # This is False initially and set to True after dependencies are installed + completed: bool = False # Actually stored as INT spec_hashes: list[str] lock_hash: str | None = None - installed_modules: list[str] = attribute(default_factory=list) + installed_modules: list[str] = SQLAttribute(default_factory=list) @property def python_path(self) -> str: @@ 
-93,18 +115,31 @@ def is_valid(self) -> bool: """Check that both the folder exists and the source python exists""" return self.exists and self.parent_exists - def delete(self) -> None: - """Delete the cache folder""" - _laz.shutil.rmtree(self.path) + @property + def base_path(self) -> str: + # Override if there is a parent folder to the environment + return self.path -class TemporaryEnv(BaseEnv, kw_only=True): +class TemporaryEnvironment(BaseEnvironment): """ This is for temporary environments that expire after a certain period """ + name: str | None = SQLAttribute( + default=None, + computed="'env_' || CAST(row_id AS STRING)", + unique=True, + ) + root_path: str + + path: str | None = SQLAttribute( + default=None, + unique=True, + computed=f"root_path || '{os.sep}' || name" + ) -class ApplicationEnv(BaseEnv, kw_only=True): +class ApplicationEnvironment(BaseEnvironment): """ Environment for permanent applications that do not get outdated """ @@ -123,60 +158,61 @@ def is_outdated(self, spec_version: str): else: return _laz.Version(spec_version) > self.version_spec - def delete(self) -> None: - # Remove the parent folder of the venv - app_folder = os.path.normpath(os.path.join(self.path, os.path.pardir)) - _laz.shutil.rmtree(app_folder) + @property + def base_path(self) -> str: + # Apps are in a /env subfolder, this gets the parent app folder + return os.path.normpath(os.path.join(self.path, os.path.pardir)) @prefab(kw_only=True) class BaseCatalogue: - ENV_TYPE = BaseEnv - + ENV_TYPE = BaseEnvironment path: str - environments: dict[str, ENV_TYPE] = attribute(default_factory=dict) + + def __init__(self, *, path: str): + raise RuntimeError("BaseCatalogue should not be initialized") + + def __prefab_post_init__(self): + # Migration code from JSON catalogue to SQL catalogue + base_name = os.path.splitext(self.path)[0] + if os.path.exists(f"{base_name}.json"): + log("Old JSON environment cache detected, clearing folder.") + self.purge_folder() @property def catalogue_folder(self): return os.path.dirname(self.path) - def save(self) -> None: - """Serialize this class into a JSON string and save""" - # For external users that may not import prefab directly - os.makedirs(self.catalogue_folder, exist_ok=True) - - with open(self.path, "w") as f: - _laz.json.dump(self, f, default=as_dict, indent=2) + @property + def connection(self): + # Create the database if it does not exist + if not os.path.exists(self.path): + os.makedirs(os.path.dirname(self.path), exist_ok=True) + with SQLContext(self.path) as con: + self.ENV_TYPE.create_table(con) - @classmethod - def load(cls, path): - try: - with open(path, 'r') as f: - json_data = _laz.json.load(f) - except (FileNotFoundError, _laz.json.JSONDecodeError): - # noinspection PyArgumentList - return cls(path=path) - else: - cls_keys = {k for k, v in get_attributes(cls).items() if v.init} + return SQLContext(self.path) - filtered_data = { - k: v for k, v in json_data.items() if k in cls_keys + @property + def environments(self) -> dict[str, ENV_TYPE]: + with self.connection as con: + return { + env.name: env + for env in self.ENV_TYPE.select_rows(con) } - environments = {} - for k, v in filtered_data.get("environments", {}).items(): - environments[k] = cls.ENV_TYPE(**v) - - filtered_data["environments"] = environments - - # noinspection PyArgumentList - return cls(**filtered_data) + def env_by_name(self, envname: str) -> ENV_TYPE: + with self.connection as con: + return self.ENV_TYPE.select_row( + con, + filters={"name": envname} + ) def delete_env(self, envname: 
str) -> None: - if env := self.environments.get(envname): - env.delete() - del self.environments[envname] - self.save() + if env := self.env_by_name(envname): + _laz.shutil.rmtree(env.path) + with self.connection as con: + env.delete_row(con) else: raise FileNotFoundError(f"Cache {envname!r} not found") @@ -184,18 +220,13 @@ def purge_folder(self): """ Clear the cache folder when things have gone wrong or for a new version. """ - # This does not save as the act of deleting the catalogue folder - # will delete the file. It should not automatically be recreated. + # Clear the folder, by its nature this also deletes the database - # Clear the folder try: _laz.shutil.rmtree(self.catalogue_folder) except FileNotFoundError: # pragma: no cover pass - # Clear environment list - self.environments = {} - def find_env_hash(self, *, spec: EnvironmentSpec) -> ENV_TYPE | None: """ Attempt to find a cached python environment that matches the hash @@ -207,8 +238,17 @@ def find_env_hash(self, *, spec: EnvironmentSpec) -> ENV_TYPE | None: :param spec: EnvironmentSpec of requirements :return: CacheFolder details of python env that satisfies it or None """ - for cache in self.environments.values(): - if spec.spec_hash in cache.spec_hashes: + filters = { + "spec_hashes": f"%{spec.spec_hash}%" + } + with self.connection as con: + caches = self.ENV_TYPE.select_like(con, filters) + + for cache in caches: + if not cache.completed: + # Ignore venvs that are still being built + continue + if spec.lock_hash and (spec.lock_hash != cache.lock_hash): log(f"Input spec matched {cache.name}, but lockfile did not match.") continue @@ -221,57 +261,11 @@ def find_env_hash(self, *, spec: EnvironmentSpec) -> ENV_TYPE | None: continue cache.last_used = _datetime_now_iso() - self.save() - return cache - else: - return None - - @staticmethod - def _get_python_install( - spec: EnvironmentSpec, - uv_path: str | None, - config: Config, - ): - install = None + cache.update_row(con, ["last_used"]) - # Find a valid python executable - for inst in _laz.list_python_installs(): - if inst.implementation.lower() != "cpython": - # Ignore all non cpython installs for now - continue - if ( - not spec.details.requires_python - or spec.details.requires_python_spec.contains(inst.version_str) - ): - install = inst - break - else: - # If no Python was matched try to install a matching python from UV - if uv_path and config.uv_install_python: - uv_pythons = _laz.get_available_pythons(uv_path) - matched_python = False - for ver in uv_pythons: - if spec.details.requires_python_spec.contains(ver): - # Install matching python - _laz.install_uv_python( - uv_path=uv_path, - version_str=ver, - ) - matched_python = ver - break - if matched_python: - # Recover the actual install - for inst in _laz.get_installed_uv_pythons(): - if inst.version_str == matched_python: - install = inst - break - - if install is None: - raise PythonVersionNotFound( - f"Could not find a Python install satisfying {spec.details.requires_python!r}." 
- ) - - return install + return cache + else: + return None def _create_venv( self, @@ -399,29 +393,35 @@ def _create_venv( env.installed_modules.extend(installed_modules) - self.environments[env.name] = env - self.save() + env.completed = True + + with self.connection as con: + env.update_row(con, ["installed_modules", "completed"]) @prefab(kw_only=True) -class TempCatalogue(BaseCatalogue): +class TemporaryCatalogue(BaseCatalogue): """ Catalogue for temporary environments """ - ENV_TYPE = TemporaryEnv + ENV_TYPE = TemporaryEnvironment - environments: dict[str, ENV_TYPE] = attribute(default_factory=dict) - env_counter: int = 0 + # In theory some of the datetime work could now be done in sqlite + # But just keep the same logic as for JSON for now @property def oldest_cache(self) -> str | None: """ :return: name of the oldest cache or None if there are no caches """ + old_cache = None - for cache in self.environments.values(): + with self.connection as con: + caches = self.ENV_TYPE.select_rows(con) + + for cache in caches: if old_cache: - if cache.last_used < old_cache.last_used: + if cache.last_used_date < old_cache.last_used_date: old_cache = cache else: old_cache = cache @@ -445,8 +445,6 @@ def expire_caches(self, lifetime: _timedelta) -> None: if (ctime - cache.created_date) > lifetime: self.delete_env(cachename) - self.save() - def find_locked_env( self, *, @@ -459,11 +457,17 @@ def find_locked_env( :return: TemporaryEnv environment or None """ # Get lock data hash - for cache in self.environments.values(): - if ( - cache.lock_hash == spec.lock_hash - and cache.python_version in spec.details.requires_python_spec - ): + filters = {"lock_hash": spec.lock_hash} + with self.connection as con: + lock_caches = self.ENV_TYPE.select_rows(con, filters) + + for cache in lock_caches: + if not cache.completed: + # Ignore environments that are still being built + continue + + if cache.python_version in spec.details.requires_python_spec: + if not cache.is_valid: log(f"Cache {cache.name!r} does not point to a valid python, removing.") self.delete_env(cache.name) @@ -471,7 +475,6 @@ def find_locked_env( log(f"Lockfile hash {spec.lock_hash!r} matched environment {cache.name}") cache.last_used = _datetime_now_iso() - self.save() return cache else: return None @@ -487,6 +490,10 @@ def find_sufficient_env(self, *, spec: EnvironmentSpec) -> ENV_TYPE | None: """ for cache in self.environments.values(): + if not cache.completed: + # Ignore environments that are still being built + continue + # If no python version listed ignore it # If python version is listed, make sure it matches if spec.details.requires_python: @@ -523,9 +530,15 @@ def find_sufficient_env(self, *, spec: EnvironmentSpec) -> ENV_TYPE | None: log(f"Adding {spec.spec_hash!r} to {cache.name!r} hash list") cache.last_used = _datetime_now_iso() - cache.spec_hashes.append(spec.spec_hash) - self.save() + if spec.spec_hash not in cache.spec_hashes: + # If for whatever reason this has been called when hash matches + # Don't add the same hash multiple times. 
+ cache.spec_hashes.append(spec.spec_hash) + + with self.connection as con: + cache.update_row(con, ["last_used", "spec_hashes"]) + return cache else: @@ -552,6 +565,7 @@ def create_env( config: Config, uv_path: str | None, installer_command: list[str], + base_python, ) -> ENV_TYPE: # Check the spec is valid if spec_errors := spec.details.errors(): @@ -563,46 +577,41 @@ def create_env( log(f"Deleting {del_cache}") self.delete_env(del_cache) - new_cachename = f"env_{self.env_counter}" - self.env_counter += 1 + with self.connection as con: - cache_path = os.path.join(self.catalogue_folder, new_cachename) - - install = self._get_python_install( - spec=spec, - uv_path=uv_path, - config=config, - ) + # Construct the Env + # noinspection PyArgumentList + new_env = self.ENV_TYPE( + root_path=self.catalogue_folder, + python_version=base_python.version_str, + parent_python=base_python.executable, + spec_hashes=[spec.spec_hash], + lock_hash=spec.lock_hash, + ) - # Construct the Env - # noinspection PyArgumentList - new_env = self.ENV_TYPE( - name=new_cachename, - path=cache_path, - python_version=install.version_str, - parent_python=install.executable, - spec_hashes=[spec.spec_hash], - lock_hash=spec.lock_hash, - ) + new_env.insert_row(con) - self._create_venv( - spec=spec, - uv_path=uv_path, - installer_command=installer_command, - env=new_env, - ) + try: + self._create_venv( + spec=spec, + uv_path=uv_path, + installer_command=installer_command, + env=new_env, + ) + except Exception: + with self.connection as con: + new_env.delete_row(con) + raise return new_env @prefab(kw_only=True) class ApplicationCatalogue(BaseCatalogue): - ENV_TYPE = ApplicationEnv - - environments: dict[str, ENV_TYPE] = attribute(default_factory=dict) + ENV_TYPE = ApplicationEnvironment def find_env_hash(self, *, spec: EnvironmentSpec) -> ENV_TYPE | None: - env: ApplicationEnv | None = super().find_env_hash(spec=spec) + env: ApplicationEnvironment | None = super().find_env_hash(spec=spec) if env: # Need to check the lockfile hasn't changed if a match is found @@ -630,6 +639,15 @@ def find_env(self, spec: EnvironmentSpec) -> ENV_TYPE | None: env = None if cache := self.environments.get(details.app.appkey): + if not cache.completed: + # Perhaps it should check the age of the env to decide if it should wait + # and see if the env has been created? + raise RuntimeError( + f"Environment \"{cache.name}\" has not been completed. " + "Either it is currently being built by another process " + "or the build has failed and the environment needs to be deleted." + ) + # Logic is a bit long here because if the versions match we want to # avoid generating the packaging.version. Otherwise we would check # for the outdated version first. 
@@ -645,6 +663,12 @@ def find_env(self, spec: EnvironmentSpec) -> ENV_TYPE | None: cache.last_used = _datetime_now_iso() cache.spec_hashes.append(spec.spec_hash) env = cache + + with self.connection as con: + cache.update_row( + con, + ["last_used", "spec_hashes"] + ) elif details.app.version_spec >= cache.version_spec: # Allow for the version spec to be equal cache.last_used = _datetime_now_iso() @@ -654,6 +678,12 @@ def find_env(self, spec: EnvironmentSpec) -> ENV_TYPE | None: cache.spec_hashes.append(spec.spec_hash) else: cache.spec_hashes = [spec.spec_hash] + + with self.connection as con: + cache.update_row( + con, + ["last_used", "spec_hashes", "version"] + ) env = cache else: raise ApplicationError( @@ -690,7 +720,6 @@ def find_env(self, spec: EnvironmentSpec) -> ENV_TYPE | None: f"app version: {details.app.version} \n" f"installed version: {cache.version}" ) - self.save() return env def create_env( @@ -700,6 +729,7 @@ def create_env( config: Config, uv_path: str, installer_command: list[str], + base_python, ): if not spec.lockdata: raise ApplicationError("Application environments require a lockfile.") @@ -725,18 +755,12 @@ def create_env( "env", ) - install = self._get_python_install( - spec=spec, - uv_path=uv_path, - config=config, - ) - # noinspection PyArgumentList new_env = self.ENV_TYPE( name=details.app.appkey, path=env_path, - python_version=install.version_str, - parent_python=install.executable, + python_version=base_python.version_str, + parent_python=base_python.executable, spec_hashes=[spec.spec_hash], lock_hash=spec.lock_hash, owner=details.app.owner, @@ -744,11 +768,19 @@ def create_env( version=details.app.version, ) - self._create_venv( - spec=spec, - uv_path=uv_path, - installer_command=installer_command, - env=new_env, - ) + with self.connection as con: + new_env.insert_row(con) + + try: + self._create_venv( + spec=spec, + uv_path=uv_path, + installer_command=installer_command, + env=new_env, + ) + except Exception: + with self.connection as con: + new_env.delete_row(con) + raise return new_env diff --git a/src/ducktools/env/manager.py b/src/ducktools/env/manager.py index 5f5444b..cae5dea 100644 --- a/src/ducktools/env/manager.py +++ b/src/ducktools/env/manager.py @@ -38,9 +38,9 @@ ) from .config import Config from .platform_paths import ManagedPaths -from .catalogue import TempCatalogue, ApplicationCatalogue +from .catalogue import TemporaryCatalogue, ApplicationCatalogue from .environment_specs import EnvironmentSpec -from .exceptions import UVUnavailableError, InvalidEnvironmentSpec +from .exceptions import UVUnavailableError, InvalidEnvironmentSpec, PythonVersionNotFound from ._lazy_imports import laz as _laz from ._logger import log @@ -51,7 +51,7 @@ class Manager(Prefab): config: Config = None paths: ManagedPaths = attribute(init=False, repr=False) - _temp_catalogue: TempCatalogue | None = attribute(default=None, private=True) + _temp_catalogue: TemporaryCatalogue | None = attribute(default=None, private=True) _app_catalogue: ApplicationCatalogue | None = attribute(default=None, private=True) def __prefab_post_init__(self, config): @@ -59,9 +59,9 @@ def __prefab_post_init__(self, config): self.config = Config.load(self.paths.config_path) if config is None else config @property - def temp_catalogue(self) -> TempCatalogue: + def temp_catalogue(self) -> TemporaryCatalogue: if self._temp_catalogue is None: - self._temp_catalogue = TempCatalogue.load(self.paths.cache_db) + self._temp_catalogue = TemporaryCatalogue(path=self.paths.cache_db) # Clear expired caches 
on load self._temp_catalogue.expire_caches(self.config.cache_lifetime_delta) @@ -70,7 +70,7 @@ def temp_catalogue(self) -> TempCatalogue: @property def app_catalogue(self) -> ApplicationCatalogue: if self._app_catalogue is None: - self._app_catalogue = ApplicationCatalogue.load(self.paths.application_db) + self._app_catalogue = ApplicationCatalogue(path=self.paths.application_db) return self._app_catalogue @property @@ -110,6 +110,48 @@ def retrieve_uv(self, required=False) -> str | None: return uv_path + def _get_python_install(self, spec: EnvironmentSpec): + install = None + + # Find a valid python executable + for inst in _laz.list_python_installs(): + if inst.implementation.lower() != "cpython": + # Ignore all non cpython installs for now + continue + if ( + not spec.details.requires_python + or spec.details.requires_python_spec.contains(inst.version_str) + ): + install = inst + break + else: + # If no Python was matched try to install a matching python from UV + if (uv_path := self.retrieve_uv()) and self.config.uv_install_python: + uv_pythons = _laz.get_available_pythons(uv_path) + matched_python = False + for ver in uv_pythons: + if spec.details.requires_python_spec.contains(ver): + # Install matching python + _laz.install_uv_python( + uv_path=uv_path, + version_str=ver, + ) + matched_python = ver + break + if matched_python: + # Recover the actual install + for inst in _laz.get_installed_uv_pythons(): + if inst.version_str == matched_python: + install = inst + break + + if install is None: + raise PythonVersionNotFound( + f"Could not find a Python install satisfying {spec.details.requires_python!r}." + ) + + return install + def install_base_command(self, use_uv=True) -> list[str]: # Get the installer command for python packages # Pip or the faster uv_pip if it is available @@ -175,23 +217,29 @@ def get_script_env(self, spec: EnvironmentSpec): # Request an application environment env = self.app_catalogue.find_env(spec=spec) + base_python = self._get_python_install(spec=spec) + if not env: env = self.app_catalogue.create_env( spec=spec, config=self.config, uv_path=self.retrieve_uv(), installer_command=self.install_base_command(), + base_python=base_python ) else: env = self.temp_catalogue.find_env(spec=spec) if not env: log("Existing environment not found, creating new environment.") + base_python = self._get_python_install(spec=spec) + env = self.temp_catalogue.create_env( spec=spec, config=self.config, uv_path=self.retrieve_uv(), installer_command=self.install_base_command(), + base_python=base_python, ) return env diff --git a/src/ducktools/env/platform_paths.py b/src/ducktools/env/platform_paths.py index d4ba081..9d550f8 100644 --- a/src/ducktools/env/platform_paths.py +++ b/src/ducktools/env/platform_paths.py @@ -42,8 +42,8 @@ class UnsupportedPlatformError(Exception): # Filenames for configuration and catalogue CONFIG_FILENAME = "config.json" -CATALOGUE_FILENAME = "catalogue.json" -APPCATALOGUE_FILENAME = "app_catalogue.json" +CATALOGUE_FILENAME = "catalogue.db" +APPCATALOGUE_FILENAME = "app_catalogue.db" # Store in LOCALAPPDATA for windows, User folder for other operating systems diff --git a/tests/conftest.py b/tests/conftest.py index 05490b6..a04bd72 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -16,12 +16,12 @@ import sys import os.path -import shutil +import tempfile from ducktools.pythonfinder import get_python_installs from ducktools.pythonfinder.shared import get_install_details -from ducktools.env.catalogue import TempCatalogue +from 
ducktools.env.catalogue import TemporaryCatalogue from ducktools.env.config import Config import ducktools.env.platform_paths as platform_paths @@ -39,6 +39,10 @@ def available_pythons(): def this_python(): py = sys.executable details = get_install_details(py) + # Pretend PyPy is CPython for tests + if details.implementation == "pypy": + details.implementation = "cpython" + # Remove pre-release number from version! details.version = *details.version[:3], "release", 0 return details @@ -56,13 +60,10 @@ def catalogue_path(): """ Provide a test folder path for python environments, delete after tests in a class have run. """ - folder = os.path.join(os.path.dirname(__file__), "test_envs") - cache_file = os.path.join(folder, platform_paths.CATALOGUE_FILENAME) - yield cache_file - try: - shutil.rmtree(folder) - except FileNotFoundError: - pass + base_folder = os.path.join(os.path.dirname(__file__), "testing_data") + with tempfile.TemporaryDirectory(dir=base_folder) as folder: + cache_file = os.path.join(folder, platform_paths.CATALOGUE_FILENAME) + yield cache_file @pytest.fixture(scope="session") @@ -76,5 +77,5 @@ def test_config(): @pytest.fixture(scope="function") def testing_catalogue(catalogue_path): - catalogue = TempCatalogue(path=catalogue_path) + catalogue = TemporaryCatalogue(path=catalogue_path) yield catalogue diff --git a/tests/example_scripts/pep_723_example.py b/tests/example_scripts/pep_723_example.py index 5e3d34c..ce20d20 100644 --- a/tests/example_scripts/pep_723_example.py +++ b/tests/example_scripts/pep_723_example.py @@ -1,5 +1,5 @@ # /// script -# requires-python = ">=3.11" +# requires-python = ">=3.10" # dependencies = [ # "requests<3", # "rich", diff --git a/tests/example_scripts/pep_723_example_subset.py b/tests/example_scripts/pep_723_example_subset.py new file mode 100644 index 0000000..abfc9ef --- /dev/null +++ b/tests/example_scripts/pep_723_example_subset.py @@ -0,0 +1,6 @@ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "requests<3", +# ] +# /// \ No newline at end of file diff --git a/tests/example_scripts/print_environment_variables.py b/tests/example_scripts/print_environment_variables.py index 9b8016d..78dec90 100644 --- a/tests/example_scripts/print_environment_variables.py +++ b/tests/example_scripts/print_environment_variables.py @@ -22,7 +22,7 @@ # SOFTWARE. # /// script -# requires-python = ">=3.11" +# requires-python = ">=3.10" # # [tool.ducktools.env] # include.data = ["./"] diff --git a/tests/test_catalogue.py b/tests/test_catalogue.py index d5a83df..d29990b 100644 --- a/tests/test_catalogue.py +++ b/tests/test_catalogue.py @@ -20,53 +20,66 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
+import os import sys +import tempfile import unittest.mock as mock + from datetime import datetime, timedelta from pathlib import Path -from ducktools.classbuilder.prefab import as_dict - import pytest from packaging.version import Version from ducktools.env.catalogue import ( - BaseCatalogue, + BaseCatalogue, ApplicationCatalogue, - TempCatalogue, - ApplicationEnv, - TemporaryEnv, + TemporaryCatalogue, + ApplicationEnvironment, + TemporaryEnvironment, ) from ducktools.env.config import Config from ducktools.env.environment_specs import EnvironmentSpec from ducktools.env.exceptions import PythonVersionNotFound +import ducktools.env.platform_paths as platform_paths -@pytest.fixture -def mock_save(): - # Mock the .save() function from BaseCatalogue - with mock.patch.object(BaseCatalogue, "save") as save_func: - yield save_func +@pytest.fixture(scope="function") +def sql_catalogue_path(): + """ + Provide a test folder path for python environments, delete after tests in a class have run. + """ + base_folder = os.path.join(os.path.dirname(__file__), "testing_data") + os.makedirs(base_folder, exist_ok=True) + with tempfile.TemporaryDirectory(dir=base_folder) as folder: + cache_file = os.path.join(folder, platform_paths.CATALOGUE_FILENAME) + yield cache_file @pytest.fixture(scope="function") -def fake_temp_envs(catalogue_path): - env_0_path = str(Path(catalogue_path).parent / "env_0") - env_1_path = str(Path(catalogue_path).parent / "env_1") - env_2_path = str(Path(catalogue_path).parent / "env_2") +def fake_temp_catalogue(sql_catalogue_path): + cat = TemporaryCatalogue( + path=sql_catalogue_path, + ) + return cat + + +@pytest.fixture(scope="function") +def fake_temp_envs(fake_temp_catalogue): python_path = sys.executable python_version = ".".join(str(item) for item in sys.version_info[:3]) # ENV examples based on examples folder - env_0 = TemporaryEnv( - name="env_0", - path=env_0_path, + env_0 = TemporaryEnvironment( + row_id=0, + root_path=fake_temp_catalogue.catalogue_folder, python_version=python_version, parent_python=python_path, created_on="2024-09-02T14:55:53.102038", last_used="2024-09-02T14:55:53.102038", - spec_hashes=["6986c6ae4a2965a4456333b8c60c5ac923ddca0d7edaa70b36b50f545ed8b24b"], + completed=True, + spec_hashes=["0caeabf94f2a523db4bb52752ef95067dd7e5c1e8f5b1e249dc37abdd1e60e1f"], installed_modules=[ "certifi==2024.8.30", "charset-normalizer==3.3.2", @@ -80,45 +93,74 @@ def fake_temp_envs(catalogue_path): ] ) - env_1 = TemporaryEnv( - name="env_1", - path=env_1_path, + env_1 = TemporaryEnvironment( + row_id=1, + root_path=fake_temp_catalogue.catalogue_folder, python_version=python_version, parent_python=python_path, created_on="2024-09-02T14:55:58.827666", last_used="2024-09-02T14:55:58.827666", + completed=False, spec_hashes=["85cdf5c0f9b109ba70cd936b153fd175307406eb802e05df453d5ccf5a19383f"], installed_modules=["cowsay==6.1"], ) - env_2 = TemporaryEnv( - name="env_2", - path=env_2_path, + env_2 = TemporaryEnvironment( + row_id=2, + root_path=fake_temp_catalogue.catalogue_folder, + python_version=python_version, + parent_python=python_path, + created_on="2024-09-02T14:55:59.827666", + last_used="2024-09-02T14:55:59.827666", + completed=True, + spec_hashes=["85cdf5c0f9b109ba70cd936b153fd175307406eb802e05df453d5ccf5a19383f"], + installed_modules=["cowsay==6.1"], + ) + + env_3 = TemporaryEnvironment( + row_id=3, + root_path=fake_temp_catalogue.catalogue_folder, python_version=python_version, parent_python=python_path, created_on="2024-09-25T17:55:23.254577", 
last_used="2024-09-26T11:29:12.233691", + completed=True, spec_hashes=["85cdf5c0f9b109ba70cd936b153fd175307406eb802e05df453d5ccf5a19383f"], lock_hash="840760dd5d911f145b94c72e670754391bf19c33d5272da7362b629c484fd1f6", installed_modules=["cowsay==6.1"], ) - return {"env_0": env_0, "env_1": env_1, "env_2": env_2} + # Add the environments to the catalogue so they get names and paths + with fake_temp_catalogue.connection as con: + env_0.insert_row(con) + env_1.insert_row(con) + env_2.insert_row(con) + env_3.insert_row(con) + + return {"env_0": env_0, "env_1": env_1, "env_2": env_2, "env_3": env_3} + + +@pytest.fixture(scope="function") +def fake_full_catalogue(fake_temp_catalogue, fake_temp_envs): + # By using fake_temp_envs the catalogue is populated + return fake_temp_catalogue + @pytest.fixture -def fake_app_env(catalogue_path): +def fake_app_env(sql_catalogue_path): python_path = sys.executable python_version = ".".join(str(item) for item in sys.version_info[:3]) # Env based on examples folder appname = "ducktools_testing/cowsay_example" - env = ApplicationEnv( + env = ApplicationEnvironment( name=appname, - path=str(Path(catalogue_path).parent / "ducktools_testing/cowsay_example/env"), + path=str(Path(sql_catalogue_path).parent / "ducktools_testing/cowsay_example/env"), python_version=python_version, parent_python=python_path, created_on="2024-09-25T17:55:23.254577", last_used="2024-09-26T11:29:12.233691", + completed=True, spec_hashes=[ "226500066700d7910b3a57470f12f97ed402fe68b8b31fb592f0a76f7f0bd682" ], @@ -132,40 +174,28 @@ def fake_app_env(catalogue_path): ) return env - - -@pytest.fixture(scope="function") -def fake_temp_catalogue(catalogue_path, fake_temp_envs): - cat = TempCatalogue( - path=catalogue_path, - environments=fake_temp_envs, - env_counter=2, - ) - - yield cat # ENVIRONMENT TESTS -@pytest.mark.usefixtures("mock_save") class TestTempEnv: - @pytest.mark.parametrize("envname", ["env_0", "env_1", "env_2"]) - def test_python_path(self, fake_temp_envs, envname, catalogue_path): + @pytest.mark.parametrize("envname", ["env_0", "env_1", "env_2", "env_3"]) + def test_python_path(self, fake_temp_envs, envname, sql_catalogue_path): env = fake_temp_envs[envname] - base_path = Path(catalogue_path).parent + base_path = Path(sql_catalogue_path).parent if sys.platform == "win32": assert env.python_path == str(base_path / envname / "Scripts" / "python.exe") else: assert env.python_path == str(base_path / envname / "bin" / "python") - + @pytest.mark.skipif(sys.platform != "win32", reason="Windows only test") - @pytest.mark.parametrize("envname", ["env_0", "env_1", "env_2"]) - def test_python_path_windowed(self, fake_temp_envs, envname, catalogue_path): + @pytest.mark.parametrize("envname", ["env_0", "env_1", "env_2", "env_3"]) + def test_python_path_windowed(self, fake_temp_envs, envname, sql_catalogue_path): # If there is no stdout on windows assume windowed executable with mock.patch("sys.stdout", new=None): env = fake_temp_envs[envname] - base_path = Path(catalogue_path).parent + base_path = Path(sql_catalogue_path).parent assert env.python_path == str(base_path / envname / "Scripts" / "pythonw.exe") @@ -173,9 +203,9 @@ def test_dates(self, fake_temp_envs): env_0 = fake_temp_envs["env_0"] assert env_0.last_used_simple == "2024-09-02 14:55:53" - env_1 = fake_temp_envs["env_1"] - assert env_1.last_used_simple == "2024-09-02 14:55:58" - + env_2 = fake_temp_envs["env_2"] + assert env_2.last_used_simple == "2024-09-02 14:55:59" + def test_exists(self, fake_temp_envs): env_0 = 
fake_temp_envs["env_0"] assert env_0.exists is False @@ -184,31 +214,22 @@ def test_exists(self, fake_temp_envs): # Check the logic requires both exists and parent_exists to be True with mock.patch.object( - TemporaryEnv, - "exists", + TemporaryEnvironment, + "exists", new_callable=mock.PropertyMock ) as mock_exists: mock_exists.return_value = True assert env_0.is_valid is True with mock.patch.object( - TemporaryEnv, - "parent_exists", + TemporaryEnvironment, + "parent_exists", new_callable=mock.PropertyMock ) as mock_parent_exists: mock_parent_exists.return_value = False assert env_0.is_valid is False - @pytest.mark.parametrize("envname", ["env_0", "env_1", "env_2"]) - def test_delete(self, fake_temp_envs, envname): - with mock.patch("shutil.rmtree") as rmtree: - env = fake_temp_envs[envname] - env.delete() - - rmtree.assert_called_once_with(env.path) - -@pytest.mark.usefixtures("mock_save") class TestAppEnv: def test_version_spec(self, fake_app_env): assert fake_app_env.version_spec == Version("0.1.0") @@ -221,110 +242,16 @@ def test_version_spec(self, fake_app_env): assert fake_app_env.is_outdated("v0.1.1") assert fake_app_env.is_outdated("v0.1.1a1") - def test_delete(self, fake_app_env): - with mock.patch("shutil.rmtree") as rmtree: - fake_app_env.delete() - - del_path = str(Path(fake_app_env.path).parent) - rmtree.assert_called_once_with(del_path) - # CATALOGUE TESTS +def test_base_catalogue_noinit(): + # Base catalogue should not be created + with pytest.raises(RuntimeError): + _ = BaseCatalogue(path="cant/create/basecatalogue") -# All other tests mock out the save command -def test_catalogue_save(fake_temp_catalogue): - cat = fake_temp_catalogue - with ( - mock.patch("os.makedirs") as makedirs_mock, - mock.patch("json.dump") as dump_mock, - mock.patch("builtins.open") as open_mock - ): - file_mock = mock.MagicMock() - open_mock.return_value.__enter__.return_value = file_mock - - cat.save() - - makedirs_mock.assert_called_once_with(cat.catalogue_folder, exist_ok=True) - open_mock.assert_called_once_with(cat.path, "w") - dump_mock.assert_called_once_with(cat, file_mock, default=as_dict, indent=2) - -# Get python install has so many branches I wanted a separate test - -class TestGetPythonInstall: - example_paths = Path(__file__).parent / "example_scripts" - - def test_finds_python(self, fake_temp_catalogue): - script = str(self.example_paths / "pep_723_example.py") - spec = EnvironmentSpec.from_script(script) - - # Patch the spec version to match this python install - this_python = ".".join(str(i) for i in sys.version_info[:3]) - spec.details.requires_python = f"=={this_python}" - - inst = fake_temp_catalogue._get_python_install( - spec=spec, - uv_path=None, - config=Config(uv_install_python=False) - ) - - assert inst.executable == sys.executable - - - def test_no_python(self, fake_temp_catalogue): - script = str(self.example_paths / "pep_723_example.py") - spec = EnvironmentSpec.from_script(script) - - # Patch the spec version to match this python install - this_python = ".".join(str(i) for i in sys.version_info[:3]) - spec.details.requires_python = f">{this_python}" - - with pytest.raises(PythonVersionNotFound): - fake_temp_catalogue._get_python_install( - spec=spec, - uv_path=None, - config=Config(uv_install_python=False) - ) - - - -@pytest.mark.usefixtures("mock_save") class TestTempCatalogue: # Shared tests for any catalogue - def test_load_env(self, fake_temp_catalogue): - with ( - mock.patch("json.load") as mock_load, - mock.patch("builtins.open") as mock_open, - ): - mock_file 
= mock.MagicMock() - mock_open.return_value.__enter__.return_value = mock_file - - catalogue_dict = as_dict(fake_temp_catalogue) - catalogue_dict["environments"] = { - env.name: as_dict(env) - for env in catalogue_dict["environments"].values() - } - - mock_load.return_value = catalogue_dict - - fake_path = "path/to/catalogue.json" - - cat = TempCatalogue.load(fake_path) - - assert cat == fake_temp_catalogue - - mock_open.assert_called_once_with(fake_path, 'r') - mock_load.assert_called_once_with(mock_file) - - def test_load_fail_notfound(self): - with mock.patch("builtins.open") as mock_open: - mock_open.side_effect = FileNotFoundError() - fake_path = "path/to/catalogue.json" - - cat = TempCatalogue.load(fake_path) - - assert cat == TempCatalogue(path=fake_path) - - def test_delete_env(self, fake_temp_catalogue, fake_temp_envs, mock_save): + def test_delete_env(self, fake_temp_catalogue, fake_temp_envs): with mock.patch("shutil.rmtree") as rmtree: pth = fake_temp_envs["env_0"].path @@ -332,8 +259,6 @@ def test_delete_env(self, fake_temp_catalogue, fake_temp_envs, mock_save): rmtree.assert_called_once_with(pth) - mock_save.assert_called() - assert "env_0" not in fake_temp_catalogue.environments def test_delete_nonexistent_env(self, fake_temp_catalogue): @@ -343,7 +268,6 @@ def test_delete_nonexistent_env(self, fake_temp_catalogue): def test_purge_folder(self, fake_temp_catalogue): with mock.patch("shutil.rmtree") as rmtree: - fake_temp_catalogue.purge_folder() rmtree.assert_called_once_with(fake_temp_catalogue.catalogue_folder) @@ -354,70 +278,130 @@ def test_find_env_hash(self, fake_temp_catalogue, fake_temp_envs): # The python path and folder doesn't actually exist # But pretend it does - with mock.patch.object(TemporaryEnv, "is_valid", new=True): + with mock.patch.object(TemporaryEnvironment, "is_valid", new=True): env_0_spec = EnvironmentSpec.from_script( str(example_paths / "pep_723_example.py") ) env_0_recover = fake_temp_catalogue.find_env_hash(spec=env_0_spec) # This should find the env without the lockfile - env_1_spec = EnvironmentSpec.from_script( + env_1_and_2_spec = EnvironmentSpec.from_script( str(example_paths / "cowsay_ex_nolock.py") ) - env_1_recover = fake_temp_catalogue.find_env_hash(spec=env_1_spec) + env_2_recover = fake_temp_catalogue.find_env_hash(spec=env_1_and_2_spec) # This should only find the env *with* the lockfile # Despite being the same original spec - env_2_spec = EnvironmentSpec.from_script( + env_3_spec = EnvironmentSpec.from_script( str(example_paths / "cowsay_ex.py") ) - env_2_recover = fake_temp_catalogue.find_env_hash(spec=env_2_spec) + env_3_recover = fake_temp_catalogue.find_env_hash(spec=env_3_spec) + # env_1 should not be recovered even though it matches the spec + # As it is marked as incomplete assert env_0_recover == fake_temp_envs["env_0"] - assert env_1_recover == fake_temp_envs["env_1"] assert env_2_recover == fake_temp_envs["env_2"] + assert env_3_recover == fake_temp_envs["env_3"] - def test_find_env_hash_fail(self, fake_temp_catalogue): + def test_find_env_hash_fail(self, fake_full_catalogue): with ( - mock.patch.object(TempCatalogue, "delete_env") as mock_delete, - mock.patch.object(TemporaryEnv, "is_valid", new=False) + mock.patch.object(TemporaryCatalogue, "delete_env") as mock_delete, + mock.patch.object(TemporaryEnvironment, "is_valid", new=False) ): example_paths = Path(__file__).parent / "example_scripts" env_0_spec = EnvironmentSpec.from_script( str(example_paths / "pep_723_example.py") ) - empty_recover = 
fake_temp_catalogue.find_env_hash(spec=env_0_spec) + empty_recover = fake_full_catalogue.find_env_hash(spec=env_0_spec) assert empty_recover is None mock_delete.assert_called_with("env_0") + def test_find_env_sufficient(self, fake_full_catalogue, fake_temp_envs): + example_paths = Path(__file__).parent / "example_scripts" + spec = EnvironmentSpec.from_script( + example_paths / "pep_723_example_subset.py" + ) + + with mock.patch.object(TemporaryEnvironment, "is_valid", new=True): + env_0_recover = fake_full_catalogue.find_sufficient_env(spec=spec) + + original_env = fake_temp_envs["env_0"] + + # env_0 has been updated + assert env_0_recover.name == original_env.name + assert env_0_recover.last_used_date > original_env.last_used_date + + # New spec has been added to the hashes + assert env_0_recover.spec_hashes == [*original_env.spec_hashes, spec.spec_hash] + + def test_correct_find_env_called(self, fake_full_catalogue, fake_temp_envs): + with ( + mock.patch.object(TemporaryEnvironment, "is_valid", new=True), + mock.patch.object( + TemporaryCatalogue, + "find_locked_env", + wraps=fake_full_catalogue.find_locked_env, + ) as mock_locked, + mock.patch.object( + TemporaryCatalogue, + "find_sufficient_env", + wraps=fake_full_catalogue.find_sufficient_env, + ) as mock_sufficient, + ): + example_paths = Path(__file__).parent / "example_scripts" + + # env_0 does not have a lock file, should look for sufficient + env_0_spec = EnvironmentSpec.from_script( + str(example_paths / "pep_723_example.py") + ) + env_0_recover = fake_full_catalogue.find_env(spec=env_0_spec) + assert fake_temp_envs["env_0"].name == env_0_recover.name + mock_sufficient.assert_called_once_with(spec=env_0_spec) + mock_locked.assert_not_called() + mock_sufficient.reset_mock() + mock_locked.reset_mock() + + # env_3 has a lockfile, should look for the matching lock env + env_3_spec = EnvironmentSpec.from_script( + str(example_paths / "cowsay_ex.py") + ) + env_3_recover = fake_full_catalogue.find_env(spec=env_3_spec) + assert fake_temp_envs["env_3"].name == env_3_recover.name + mock_sufficient.assert_not_called() + mock_locked.assert_called_once_with(spec=env_3_spec) + mock_sufficient.reset_mock() + mock_locked.reset_mock() # Temp catalogue specific tests - def test_oldest_cache(self, fake_temp_catalogue): - assert fake_temp_catalogue.oldest_cache == "env_0" + def test_oldest_cache(self, fake_full_catalogue): + assert fake_full_catalogue.oldest_cache == "env_0" # "Use" env_0 - fake_temp_catalogue.environments["env_0"].last_used = datetime.now().isoformat() - - assert fake_temp_catalogue.oldest_cache == "env_1" + env_0 = fake_full_catalogue.environments["env_0"] + env_0.last_used = datetime.now().isoformat() + + with fake_full_catalogue.connection as con: + env_0.update_row(con, columns=["last_used"]) - # Empty catalogue returns None as oldest cache - fake_temp_catalogue.environments = {} + assert fake_full_catalogue.oldest_cache == "env_1" - assert fake_temp_catalogue.oldest_cache is None + fake_full_catalogue.purge_folder() - def test_expire_caches(self, fake_temp_catalogue, mock_save): - with mock.patch.object(fake_temp_catalogue, "delete_env") as del_env: + assert fake_full_catalogue.oldest_cache is None + + def test_expire_caches(self, fake_full_catalogue): + with mock.patch.object(fake_full_catalogue, "delete_env") as del_env: # Expire all caches - fake_temp_catalogue.expire_caches(timedelta(seconds=1)) + fake_full_catalogue.expire_caches(timedelta(seconds=1)) calls = [ mock.call("env_0"), mock.call("env_1"), + 
mock.call("env_2"), + mock.call("env_3"), ] - del_env.assert_has_calls(calls) - - mock_save.assert_called_once() \ No newline at end of file + assert del_env.mock_calls == calls diff --git a/tests/test_environmentspec.py b/tests/test_environmentspec.py index 7508429..87459ce 100644 --- a/tests/test_environmentspec.py +++ b/tests/test_environmentspec.py @@ -92,7 +92,7 @@ def test_print_envvars_script(self): spec = EnvironmentSpec.from_script(envvar_script_path) - assert spec.details.requires_python == ">=3.11" + assert spec.details.requires_python == ">=3.10" assert spec.details.dependencies == [] assert spec.details.data_sources == ["./"] diff --git a/tests/test_integration/test_build_retrieve.py b/tests/test_integration/test_build_retrieve.py index 2cc3b32..053bfc8 100644 --- a/tests/test_integration/test_build_retrieve.py +++ b/tests/test_integration/test_build_retrieve.py @@ -32,11 +32,14 @@ def test_build_retrieve(self, testing_catalogue, test_config): # Test the env does not exist yet assert testing_catalogue.find_env(spec=spec) is None + python_install = manager._get_python_install(spec=spec) + real_env = testing_catalogue.create_env( spec=spec, config=test_config, uv_path=manager.retrieve_uv(), installer_command=manager.install_base_command(), + base_python=python_install, ) assert real_env is not None diff --git a/tests/test_manager.py b/tests/test_manager.py new file mode 100644 index 0000000..c2d9f9b --- /dev/null +++ b/tests/test_manager.py @@ -0,0 +1,67 @@ +# ducktools.env +# MIT License +# +# Copyright (c) 2024 David C Ellis +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# Get python install has so many branches I wanted a separate test +import sys + +from pathlib import Path + +import pytest + +from ducktools.env.environment_specs import EnvironmentSpec +from ducktools.env.exceptions import PythonVersionNotFound +from ducktools.env.config import Config +from ducktools.env.manager import Manager + + +class TestGetPythonInstall: + example_paths = Path(__file__).parent / "example_scripts" + + def test_finds_python(self): + config = Config(use_uv=False, uv_install_python=False) + manager = Manager(project_name="ducktools-testing", config=config) + + script = str(self.example_paths / "pep_723_example.py") + spec = EnvironmentSpec.from_script(script) + + # Patch the spec version to match this python install + this_python = ".".join(str(i) for i in sys.version_info[:3]) + spec.details.requires_python = f"=={this_python}" + + inst = manager._get_python_install(spec=spec) + + assert inst.executable == sys.executable + + def test_no_python(self): + config = Config(use_uv=False, uv_install_python=False) + + manager = Manager(project_name="ducktools-testing", config=config) + script = str(self.example_paths / "pep_723_example.py") + spec = EnvironmentSpec.from_script(script) + + # Patch the spec version to match this python install + this_python = ".".join(str(i) for i in sys.version_info[:3]) + spec.details.requires_python = f">{this_python}" + + with pytest.raises(PythonVersionNotFound): + manager._get_python_install(spec=spec) + diff --git a/tests/test_platform_paths.py b/tests/test_platform_paths.py index ede96e2..423fe09 100644 --- a/tests/test_platform_paths.py +++ b/tests/test_platform_paths.py @@ -59,9 +59,9 @@ def test_basic_paths(self): assert self.paths.pip_zipapp == str(project_folder / "lib" / "pip.pyz") assert self.paths.env_folder == str(project_folder / "lib" / "ducktools-env") assert self.paths.application_folder == str(project_folder / "applications") - assert self.paths.application_db == str(project_folder / "applications" / "app_catalogue.json") + assert self.paths.application_db == str(project_folder / "applications" / "app_catalogue.db") assert self.paths.cache_folder == str(project_folder / "caches") - assert self.paths.cache_db == str(project_folder / "caches" / "catalogue.json") + assert self.paths.cache_db == str(project_folder / "caches" / "catalogue.db") assert self.paths.build_base == str(project_folder / "build") if sys.platform == "win32": @@ -69,7 +69,6 @@ def test_basic_paths(self): else: assert self.paths.uv_executable == str(project_folder / "lib" / "uv") - def test_get_versions(self): with mock.patch.object(self.paths, "get_app_version") as gav_mock: gav_mock.return_value = "0.1.0"
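
Illustrative usage sketch (not part of the patch): the new _sqlclasses.py module introduced above is a minimal object/table wrapper, and the snippet below shows how a subclass might be declared and exercised against an in-memory SQLite database. The KnownScript class, its fields, and the ":memory:" connection are hypothetical and exist only for illustration; the sketch assumes ducktools.classbuilder is installed and the module layout shown in the diff.

    import sqlite3

    from ducktools.env._sqlclasses import SQLAttribute, SQLClass


    class KnownScript(SQLClass):
        # Exactly one primary key is required; the table name becomes "known_script"
        row_id: int = SQLAttribute(default=None, primary_key=True)
        name: str = SQLAttribute(unique=True)
        tags: list[str] = SQLAttribute(default_factory=list)  # stored as a ';'-delimited TEXT column
        active: bool = False  # stored as INTEGER


    con = sqlite3.connect(":memory:")
    try:
        # CREATE TABLE IF NOT EXISTS known_script(row_id INTEGER PRIMARY KEY, ...)
        KnownScript.create_table(con)

        script = KnownScript(name="example", tags=["demo", "test"], active=True)
        script.insert_row(con)  # row_id is populated from lastrowid

        row = KnownScript.row_from_pk(con, script.row_id)
        assert row.tags == ["demo", "test"]  # list[str] round-trips through the delimited string
        assert row.active is True            # INTEGER column converted back to bool

        script.active = False
        script.update_row(con, ["active"])   # UPDATE ... SET active = :active WHERE row_id = :row_id
    finally:
        con.close()

In catalogue.py the same pattern is wrapped by SQLContext, which opens a sqlite3 connection on entry and closes it on exit, so the catalogue methods can write "with self.connection as con: ..." instead of managing connections by hand.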