diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md
index b5f3ccff40..e602e4d512 100644
--- a/RELEASE-NOTES.md
+++ b/RELEASE-NOTES.md
@@ -42,6 +42,8 @@
 * `--follow`: Stream logs in real-time.
 * `--follow-interval`: Set custom polling intervals during log streaming.
 * `snow connection add` supports `--no-interactive` flag to skip interactive prompts.
+* Added support for glob patterns in artifact paths in snowflake.yml for Streamlit.
+* Added support for glob patterns in artifact paths in snowflake.yml for Snowpark; requires the ENABLE_SNOWPARK_GLOB_SUPPORT feature flag.

 ## Fixes and improvements
 * `snow --info` callback returns information about `SNOWFLAKE_HOME` variable.
diff --git a/src/snowflake/cli/_plugins/nativeapp/entities/application_package.py b/src/snowflake/cli/_plugins/nativeapp/entities/application_package.py
index 83f32d8b52..5eb4e4ee2d 100644
--- a/src/snowflake/cli/_plugins/nativeapp/entities/application_package.py
+++ b/src/snowflake/cli/_plugins/nativeapp/entities/application_package.py
@@ -4,7 +4,7 @@
 import re
 from pathlib import Path
 from textwrap import dedent
-from typing import List, Literal, Optional, Union
+from typing import List, Literal, Optional

 import typer
 from click import BadOptionUsage, ClickException
@@ -64,6 +64,7 @@
 )
 from snowflake.cli.api.errno import DOES_NOT_EXIST_OR_NOT_AUTHORIZED
 from snowflake.cli.api.exceptions import SnowflakeSQLExecutionError
+from snowflake.cli.api.project.schemas.commons import Artifacts
 from snowflake.cli.api.project.schemas.entities.common import (
     EntityModelBase,
     Identifier,
@@ -90,7 +91,7 @@ class ApplicationPackageEntityModel(EntityModelBase):
     type: Literal["application package"] = DiscriminatorField()  # noqa: A003
-    artifacts: List[Union[PathMapping, str]] = Field(
+    artifacts: Artifacts = Field(
         title="List of paths or file source/destination pairs to add to the deploy root",
     )
     bundle_root: Optional[str] = Field(
@@ -137,10 +138,8 @@ def append_test_resource_suffix_to_identifier(
     @field_validator("artifacts")
     @classmethod
-    def transform_artifacts(
-        cls, orig_artifacts: List[Union[PathMapping, str]]
-    ) -> List[PathMapping]:
-        transformed_artifacts = []
+    def transform_artifacts(cls, orig_artifacts: Artifacts) -> List[PathMapping]:
+        transformed_artifacts: List[PathMapping] = []
         if orig_artifacts is None:
             return transformed_artifacts
diff --git a/src/snowflake/cli/_plugins/snowpark/commands.py b/src/snowflake/cli/_plugins/snowpark/commands.py
index ba73ca59e4..cbe9cea568 100644
--- a/src/snowflake/cli/_plugins/snowpark/commands.py
+++ b/src/snowflake/cli/_plugins/snowpark/commands.py
@@ -20,6 +20,7 @@
 import typer
 from click import ClickException, UsageError
+from snowflake.cli._plugins.nativeapp.artifacts import BundleMap, symlink_or_copy
 from snowflake.cli._plugins.object.commands import (
     describe as object_describe,
 )
@@ -59,7 +60,7 @@
     IndexUrlOption,
     SkipVersionCheckOption,
 )
-from snowflake.cli._plugins.snowpark.zipper import zip_dir
+from snowflake.cli._plugins.snowpark.zipper import zip_dir, zip_dir_using_bundle_map
 from snowflake.cli._plugins.stage.manager import StageManager
 from snowflake.cli.api.cli_global_context import (
     get_cli_context,
@@ -81,6 +82,7 @@
 from snowflake.cli.api.exceptions import (
     SecretsWithoutExternalAccessIntegrationError,
 )
+from snowflake.cli.api.feature_flags import FeatureFlag
 from snowflake.cli.api.identifiers import FQN
 from snowflake.cli.api.output.types import (
     CollectionResult,
@@ -95,6 +97,7 @@
     ProjectDefinition,
     ProjectDefinitionV2,
 )
+from 
snowflake.cli.api.project.schemas.v1.native_app.path_mapping import PathMapping from snowflake.cli.api.secure_path import SecurePath from snowflake.connector import DictCursor, ProgrammingError from snowflake.connector.cursor import SnowflakeCursor @@ -225,8 +228,8 @@ def build_artifacts_mappings( entities_to_imports_map[entity_id].add(artefact_dto.import_path(stage)) stages_to_artifact_map[stage].update(required_artifacts) - if project_paths.dependencies.exists(): - deps_artefact = project_paths.get_dependencies_artefact() + deps_artefact = project_paths.get_dependencies_artefact() + if deps_artefact.post_build_path.exists(): stages_to_artifact_map[stage].add(deps_artefact) entities_to_imports_map[entity_id].add(deps_artefact.import_path(stage)) return entities_to_imports_map, stages_to_artifact_map @@ -239,11 +242,12 @@ def create_stages_and_upload_artifacts(stages_to_artifact_map: StageToArtefactMa stage = FQN.from_stage(stage).using_context() stage_manager.create(fqn=stage, comment="deployments managed by Snowflake CLI") for artefact in artifacts: + post_build_path = artefact.post_build_path cli_console.step( - f"Uploading {artefact.post_build_path.name} to {artefact.upload_path(stage)}" + f"Uploading {post_build_path.name} to {artefact.upload_path(stage)}" ) stage_manager.put( - local_path=artefact.post_build_path, + local_path=post_build_path, stage_path=artefact.upload_path(stage), overwrite=True, ) @@ -324,6 +328,9 @@ def build( anaconda_packages_manager = AnacondaPackagesManager() + # Clean up deploy root + project_paths.remove_up_deploy_root() + # Resolve dependencies if project_paths.requirements.exists(): with ( @@ -362,22 +369,50 @@ def build( ) if any(temp_deps_dir.path.iterdir()): - cli_console.step(f"Creating {project_paths.dependencies.name}") + dep_artifact = project_paths.get_dependencies_artefact() + cli_console.step(f"Creating {dep_artifact.path.name}") zip_dir( source=temp_deps_dir.path, - dest_zip=project_paths.dependencies, + dest_zip=dep_artifact.post_build_path, ) else: cli_console.step(f"No external dependencies.") artifacts = set() - for entity in get_snowpark_entities(pd).values(): - artifacts.update(entity.artifacts) - with cli_console.phase("Preparing artifacts for source code"): - for artefact in artifacts: - artefact_dto = project_paths.get_artefact_dto(artefact) - artefact_dto.build() + if FeatureFlag.ENABLE_SNOWPARK_GLOB_SUPPORT.is_enabled(): + for entity in get_snowpark_entities(pd).values(): + for artifact in entity.artifacts: + artifacts.add(project_paths.get_artefact_dto(artifact)) + + for artefact in artifacts: + bundle_map = BundleMap( + project_root=artefact.project_root, + deploy_root=project_paths.deploy_root, + ) + bundle_map.add(PathMapping(src=str(artefact.path), dest=artefact.dest)) + + if artefact.path.is_file(): + for (absolute_src, absolute_dest) in bundle_map.all_mappings( + absolute=True, expand_directories=False + ): + symlink_or_copy( + absolute_src, + absolute_dest, + deploy_root=bundle_map.deploy_root(), + ) + else: + zip_dir_using_bundle_map( + bundle_map=bundle_map, + dest_zip=artefact.post_build_path, + ) + else: + for entity in get_snowpark_entities(pd).values(): + for artifact in entity.artifacts: + artifacts.add(project_paths.get_artefact_dto(artifact)) + + for artefact in artifacts: + artefact.build() return MessageResult(f"Build done.") diff --git a/src/snowflake/cli/_plugins/snowpark/snowpark_entity_model.py b/src/snowflake/cli/_plugins/snowpark/snowpark_entity_model.py index a92716280c..5b355b804a 100644 --- 
a/src/snowflake/cli/_plugins/snowpark/snowpark_entity_model.py +++ b/src/snowflake/cli/_plugins/snowpark/snowpark_entity_model.py @@ -14,11 +14,13 @@ from __future__ import annotations -from pathlib import Path +import glob from typing import List, Literal, Optional, Union from pydantic import Field, field_validator +from snowflake.cli.api.feature_flags import FeatureFlag from snowflake.cli.api.identifiers import FQN +from snowflake.cli.api.project.schemas.commons import Artifacts from snowflake.cli.api.project.schemas.entities.common import ( EntityModelBase, ExternalAccessBaseModel, @@ -26,24 +28,11 @@ ) from snowflake.cli.api.project.schemas.updatable_model import ( DiscriminatorField, - UpdatableModel, ) +from snowflake.cli.api.project.schemas.v1.native_app.path_mapping import PathMapping from snowflake.cli.api.project.schemas.v1.snowpark.argument import Argument -class PathMapping(UpdatableModel): - class Config: - frozen = True - - src: Path = Field(title="Source path (relative to project root)", default=None) - - dest: Optional[str] = Field( - title="Destination path on stage", - description="Paths are relative to stage root; paths ending with a slash indicate that the destination is a directory which source files should be copied into.", - default=None, - ) - - class SnowparkEntityModel(EntityModelBase, ExternalAccessBaseModel, ImportsBaseModel): handler: str = Field( title="Function’s or procedure’s implementation of the object inside source module", @@ -59,17 +48,24 @@ class SnowparkEntityModel(EntityModelBase, ExternalAccessBaseModel, ImportsBaseM title="Python version to use when executing ", default=None ) stage: str = Field(title="Stage in which artifacts will be stored") - artifacts: List[Union[PathMapping, str]] = Field(title="List of required sources") + artifacts: Artifacts = Field(title="List of required sources") @field_validator("artifacts") @classmethod def _convert_artifacts(cls, artifacts: Union[dict, str]): _artifacts = [] - for artefact in artifacts: - if isinstance(artefact, PathMapping): - _artifacts.append(artefact) + for artifact in artifacts: + if ( + (isinstance(artifact, str) and glob.has_magic(artifact)) + or (isinstance(artifact, PathMapping) and glob.has_magic(artifact.src)) + ) and FeatureFlag.ENABLE_SNOWPARK_GLOB_SUPPORT.is_disabled(): + raise ValueError( + "If you want to use glob patterns in artifacts, you need to enable the Snowpark new build feature flag (ENABLE_SNOWPARK_GLOB_SUPPORT=true)" + ) + if isinstance(artifact, PathMapping): + _artifacts.append(artifact) else: - _artifacts.append(PathMapping(src=artefact)) + _artifacts.append(PathMapping(src=artifact)) return _artifacts @field_validator("runtime") @@ -79,14 +75,6 @@ def convert_runtime(cls, runtime_input: Union[str, float]) -> str: return str(runtime_input) return runtime_input - @field_validator("artifacts") - @classmethod - def validate_artifacts(cls, artifacts: List[Path]) -> List[Path]: - for artefact in artifacts: - if "*" in str(artefact): - raise ValueError("Glob patterns not supported for Snowpark artifacts.") - return artifacts - @property def udf_sproc_identifier(self) -> UdfSprocIdentifier: return UdfSprocIdentifier.from_definition(self) diff --git a/src/snowflake/cli/_plugins/snowpark/snowpark_project_paths.py b/src/snowflake/cli/_plugins/snowpark/snowpark_project_paths.py index 7e155a9344..cbc05e2c2d 100644 --- a/src/snowflake/cli/_plugins/snowpark/snowpark_project_paths.py +++ b/src/snowflake/cli/_plugins/snowpark/snowpark_project_paths.py @@ -13,38 +13,56 @@ # limitations 
under the License. from __future__ import annotations +import glob +import os +import re from dataclasses import dataclass from pathlib import Path, PurePosixPath +from typing import Optional from snowflake.cli._plugins.snowpark.snowpark_entity_model import PathMapping from snowflake.cli._plugins.snowpark.zipper import zip_dir from snowflake.cli.api.console import cli_console from snowflake.cli.api.constants import DEPLOYMENT_STAGE +from snowflake.cli.api.feature_flags import FeatureFlag from snowflake.cli.api.identifiers import FQN +from snowflake.cli.api.project.project_paths import ProjectPaths from snowflake.cli.api.secure_path import SecurePath @dataclass -class SnowparkProjectPaths: +class SnowparkProjectPaths(ProjectPaths): """ - This class represents allows you to manage files paths related to given project. + This class allows you to manage files paths related to given project. """ - project_root: Path - def path_relative_to_root(self, artifact_path: Path) -> Path: if artifact_path.is_absolute(): return artifact_path return (self.project_root / artifact_path).resolve() def get_artefact_dto(self, artifact_path: PathMapping) -> Artefact: - return Artefact( - dest=artifact_path.dest, - path=self.path_relative_to_root(artifact_path.src), - ) + if FeatureFlag.ENABLE_SNOWPARK_GLOB_SUPPORT.is_enabled(): + return Artefact( + project_root=self.project_root, + dest=artifact_path.dest, + path=Path(artifact_path.src), + ) + else: + return ArtefactOldBuild( + dest=artifact_path.dest, + path=self.path_relative_to_root(Path(artifact_path.src)), + ) def get_dependencies_artefact(self) -> Artefact: - return Artefact(dest=None, path=self.dependencies) + if FeatureFlag.ENABLE_SNOWPARK_GLOB_SUPPORT.is_enabled(): + return Artefact( + project_root=self.project_root, dest=None, path=Path("dependencies.zip") + ) + else: + return ArtefactOldBuild( + dest=None, path=self.path_relative_to_root(Path("dependencies.zip")) + ) @property def snowflake_requirements(self) -> SecurePath: @@ -56,18 +74,117 @@ def snowflake_requirements(self) -> SecurePath: def requirements(self) -> SecurePath: return SecurePath(self.path_relative_to_root(Path("requirements.txt"))) + +@dataclass(unsafe_hash=True) +class Artefact: + """Helper for getting paths related to given artefact.""" + + project_root: Path + path: Path + dest: str | None = None + + def __init__( + self, project_root: Path, path: Path, dest: Optional[str] = None + ) -> None: + self.project_root = project_root + self.path = path + self.dest = dest + if self.dest and not self._is_dest_a_file() and not self.dest.endswith("/"): + self.dest = self.dest + "/" + @property - def dependencies(self) -> Path: - return self.path_relative_to_root(Path("dependencies.zip")) + def _artefact_name(self) -> str: + if glob.has_magic(str(self.path)): + last_part = None + for part in self.path.parts: + if glob.has_magic(part): + break + else: + last_part = part + if not last_part: + last_part = os.path.commonpath( + [str(self.path), str(self.path.absolute())] + ) + return last_part + ".zip" + if (self.project_root / self.path).is_dir(): + return self.path.stem + ".zip" + if (self.project_root / self.path).is_file() and self._is_dest_a_file(): + return Path(self.dest).name # type: ignore + return self.path.name + + @property + def post_build_path(self) -> Path: + """ + Returns post-build artefact path. Directories are mapped to corresponding .zip files. 
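+
+        For example (per the build and deploy tests added below), a glob source
+        "src/*" with dest "source/" resolves to <deploy_root>/source/src.zip,
+        while a plain single-file source keeps its own file name.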
+ """ + deploy_root = self.deploy_root() + path = ( + self._path_until_asterisk() + if glob.has_magic(str(self.path)) + else self.path.parent + ) + if self._is_dest_a_file(): + return deploy_root / self.dest # type: ignore + return deploy_root / (self.dest or path) / self._artefact_name + + def upload_path(self, stage: FQN | str | None) -> str: + """ + Path on stage to which the artefact should be uploaded. + """ + stage = stage or DEPLOYMENT_STAGE + if isinstance(stage, str): + stage = FQN.from_stage(stage).using_context() + + stage_path = PurePosixPath(f"@{stage}") + if self.dest: + stage_path /= ( + PurePosixPath(self.dest).parent if self._is_dest_a_file() else self.dest + ) + else: + stage_path /= ( + self._path_until_asterisk() + if glob.has_magic(str(self.path)) + else PurePosixPath(self.path).parent + ) + + return str(stage_path) + "/" + + def import_path(self, stage: FQN | str | None) -> str: + """Path for UDF/sproc imports clause.""" + return self.upload_path(stage) + self._artefact_name + + def deploy_root(self) -> Path: + return self.project_root / "output" + + def _is_dest_a_file(self) -> bool: + if not self.dest: + return False + return re.search(r"\.[a-zA-Z0-9]{2,4}$", self.dest) is not None + + def _path_until_asterisk(self) -> Path: + path = [] + for part in self.path.parts: + if glob.has_magic(part): + break + else: + path.append(part) + return Path(*path[:-1]) + + # Can be removed after removing ENABLE_SNOWPARK_GLOB_SUPPORT feature flag. + def build(self) -> None: + raise NotImplementedError("Not implemented in Artefact class.") @dataclass(unsafe_hash=True) -class Artefact: +class ArtefactOldBuild(Artefact): """Helper for getting paths related to given artefact.""" path: Path dest: str | None = None + def __init__(self, path: Path, dest: Optional[str] = None) -> None: + super().__init__(project_root=Path(), path=path, dest=dest) + @property def _artefact_name(self) -> str: if self.path.is_dir(): diff --git a/src/snowflake/cli/_plugins/snowpark/zipper.py b/src/snowflake/cli/_plugins/snowpark/zipper.py index abcb457220..34889680d0 100644 --- a/src/snowflake/cli/_plugins/snowpark/zipper.py +++ b/src/snowflake/cli/_plugins/snowpark/zipper.py @@ -20,6 +20,9 @@ from typing import Dict, List, Literal from zipfile import ZIP_DEFLATED, ZipFile +from snowflake.cli._plugins.nativeapp.artifacts import BundleMap +from snowflake.cli.api.console import cli_console + log = logging.getLogger(__name__) IGNORED_FILES = [ @@ -64,6 +67,9 @@ def zip_dir( mode: Literal["r", "w", "x", "a"] = "w", ) -> None: + if not dest_zip.parent.exists(): + dest_zip.parent.mkdir(parents=True) + if isinstance(source, Path): source = [source] @@ -79,6 +85,29 @@ def zip_dir( package_zip.write(file, arcname=file.relative_to(src)) +def zip_dir_using_bundle_map( + bundle_map: BundleMap, + dest_zip: Path, + mode: Literal["r", "w", "x", "a"] = "w", +) -> None: + if not dest_zip.parent.exists(): + dest_zip.parent.mkdir(parents=True) + + with ZipFile(dest_zip, mode, ZIP_DEFLATED, allowZip64=True) as package_zip: + cli_console.step(f"Creating: {dest_zip}") + for src, _ in bundle_map.all_mappings(expand_directories=True): + if src.is_file(): + log.debug("Adding %s to %s", src, dest_zip) + package_zip.write(src, arcname=_path_without_top_level_directory(src)) + + +def _path_without_top_level_directory(path: Path) -> str: + path_parts = path.parts + if len(path_parts) > 1: + return str(Path(*path_parts[1:])) + return str(path) + + def _to_be_zipped(file: Path) -> bool: for pattern in IGNORED_FILES: # This has to be a 
string because of fnmatch diff --git a/src/snowflake/cli/_plugins/streamlit/commands.py b/src/snowflake/cli/_plugins/streamlit/commands.py index 9449cbc66a..5602dd8e9a 100644 --- a/src/snowflake/cli/_plugins/streamlit/commands.py +++ b/src/snowflake/cli/_plugins/streamlit/commands.py @@ -29,6 +29,9 @@ from snowflake.cli._plugins.streamlit.streamlit_entity_model import ( StreamlitEntityModel, ) +from snowflake.cli._plugins.streamlit.streamlit_project_paths import ( + StreamlitProjectPaths, +) from snowflake.cli.api.cli_global_context import get_cli_context from snowflake.cli.api.commands.decorators import ( with_experimental_behaviour, @@ -156,6 +159,8 @@ def streamlit_deploy( entity_type="streamlit" ) + streamlit_project_paths = StreamlitProjectPaths(cli_context.project_root) + if not streamlits: raise NoProjectDefinitionError( project_type="streamlit", project_root=cli_context.project_root @@ -174,7 +179,11 @@ def streamlit_deploy( # Get first streamlit streamlit: StreamlitEntityModel = streamlits[entity_id] - url = StreamlitManager().deploy(streamlit=streamlit, replace=replace) + url = StreamlitManager().deploy( + streamlit=streamlit, + streamlit_project_paths=streamlit_project_paths, + replace=replace, + ) if open_: typer.launch(url) diff --git a/src/snowflake/cli/_plugins/streamlit/manager.py b/src/snowflake/cli/_plugins/streamlit/manager.py index 3eabd528d9..1ec889089a 100644 --- a/src/snowflake/cli/_plugins/streamlit/manager.py +++ b/src/snowflake/cli/_plugins/streamlit/manager.py @@ -15,7 +15,7 @@ from __future__ import annotations import logging -from pathlib import Path +from pathlib import PurePosixPath from typing import List, Optional from click import ClickException @@ -24,17 +24,22 @@ MissingConnectionRegionError, make_snowsight_url, ) +from snowflake.cli._plugins.nativeapp.artifacts import BundleMap, symlink_or_copy from snowflake.cli._plugins.object.manager import ObjectManager from snowflake.cli._plugins.stage.manager import StageManager from snowflake.cli._plugins.streamlit.streamlit_entity_model import ( StreamlitEntityModel, ) +from snowflake.cli._plugins.streamlit.streamlit_project_paths import ( + StreamlitProjectPaths, +) from snowflake.cli.api.commands.experimental_behaviour import ( experimental_behaviour_enabled, ) from snowflake.cli.api.console import cli_console from snowflake.cli.api.feature_flags import FeatureFlag from snowflake.cli.api.identifiers import FQN +from snowflake.cli.api.project.schemas.v1.native_app.path_mapping import PathMapping from snowflake.cli.api.sql_execution import SqlExecutionMixin from snowflake.connector.cursor import SnowflakeCursor from snowflake.connector.errors import ProgrammingError @@ -54,26 +59,43 @@ def share(self, streamlit_name: FQN, to_role: str) -> SnowflakeCursor: def _put_streamlit_files( self, - root_location: str, - artifacts: Optional[List[Path]] = None, + streamlit_project_paths: StreamlitProjectPaths, + stage_root: str, + artifacts: Optional[List[PathMapping]] = None, ): - cli_console.step(f"Deploying files to {root_location}") + cli_console.step(f"Deploying files to {stage_root}") if not artifacts: return stage_manager = StageManager() - for file in artifacts: - if file.is_dir(): - if not any(file.iterdir()): - cli_console.warning(f"Skipping empty directory: {file}") - continue + bundle_map = BundleMap( + project_root=streamlit_project_paths.project_root, + deploy_root=streamlit_project_paths.deploy_root, + ) + for artifact in artifacts: + bundle_map.add(PathMapping(src=str(artifact.src), dest=artifact.dest)) + + 
# Clean up deploy root + streamlit_project_paths.remove_up_deploy_root() + for (absolute_src, absolute_dest) in bundle_map.all_mappings( + absolute=True, expand_directories=True + ): + if absolute_src.is_file(): + symlink_or_copy( + absolute_src, + absolute_dest, + deploy_root=streamlit_project_paths.deploy_root, + ) + # Temporary solution, will be replaced with diff + stage_path = ( + PurePosixPath(absolute_dest) + .relative_to(streamlit_project_paths.deploy_root) + .parent + ) + full_stage_path = f"{stage_root}/{stage_path}".rstrip("/") stage_manager.put( - f"{file.joinpath('*')}", f"{root_location}/{file}", 4, True + local_path=absolute_dest, stage_path=full_stage_path, overwrite=True ) - elif len(file.parts) > 1: - stage_manager.put(file, f"{root_location}/{file.parent}", 4, True) - else: - stage_manager.put(file, root_location, 4, True) def _create_streamlit( self, @@ -120,7 +142,12 @@ def _create_streamlit( self.execute_query("\n".join(query)) - def deploy(self, streamlit: StreamlitEntityModel, replace: bool = False): + def deploy( + self, + streamlit: StreamlitEntityModel, + streamlit_project_paths: StreamlitProjectPaths, + replace: bool = False, + ): streamlit_id = streamlit.fqn.using_connection(self._conn) if ( ObjectManager().object_exists(object_type="streamlit", fqn=streamlit_id) @@ -172,12 +199,13 @@ def deploy(self, streamlit: StreamlitEntityModel, replace: bool = False): embedded_stage_name = f"snow://streamlit/{stage_path}" if use_versioned_stage: # "LIVE" is the only supported version for now, but this may change later. - root_location = f"{embedded_stage_name}/versions/live" + stage_root = f"{embedded_stage_name}/versions/live" else: - root_location = f"{embedded_stage_name}/default_checkout" + stage_root = f"{embedded_stage_name}/default_checkout" self._put_streamlit_files( - root_location, + streamlit_project_paths, + stage_root, streamlit.artifacts, ) else: @@ -194,16 +222,18 @@ def deploy(self, streamlit: StreamlitEntityModel, replace: bool = False): cli_console.step(f"Creating {stage_name} stage") stage_manager.create(fqn=stage_name) - root_location = stage_manager.get_standard_stage_prefix( + stage_root = stage_manager.get_standard_stage_prefix( f"{stage_name}/{streamlit_name_for_root_location}" ) - self._put_streamlit_files(root_location, streamlit.artifacts) + self._put_streamlit_files( + streamlit_project_paths, stage_root, streamlit.artifacts + ) self._create_streamlit( streamlit=streamlit, replace=replace, - from_stage_name=root_location, + from_stage_name=stage_root, experimental=False, ) diff --git a/src/snowflake/cli/_plugins/streamlit/streamlit_entity_model.py b/src/snowflake/cli/_plugins/streamlit/streamlit_entity_model.py index 55068adb5a..0ffcf8f124 100644 --- a/src/snowflake/cli/_plugins/streamlit/streamlit_entity_model.py +++ b/src/snowflake/cli/_plugins/streamlit/streamlit_entity_model.py @@ -13,10 +13,10 @@ # limitations under the License. 
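For reference, a minimal sketch of the bundling flow that _put_streamlit_files now follows, relying only on the BundleMap and symlink_or_copy signatures used in the hunk above; the project layout here is hypothetical:

    from pathlib import Path

    from snowflake.cli._plugins.nativeapp.artifacts import BundleMap, symlink_or_copy
    from snowflake.cli.api.project.schemas.v1.native_app.path_mapping import PathMapping

    project_root = Path("my_streamlit_app")  # hypothetical project layout
    deploy_root = project_root / "output"

    bundle_map = BundleMap(project_root=project_root, deploy_root=deploy_root)
    bundle_map.add(PathMapping(src="streamlit_app.py"))
    bundle_map.add(PathMapping(src="pages"))

    # Materialize sources under the deploy root (symlink where possible,
    # copy otherwise) before they are uploaded with StageManager.put.
    for absolute_src, absolute_dest in bundle_map.all_mappings(
        absolute=True, expand_directories=True
    ):
        if absolute_src.is_file():
            symlink_or_copy(absolute_src, absolute_dest, deploy_root=deploy_root)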
from __future__ import annotations -from pathlib import Path -from typing import List, Literal, Optional +from typing import Literal, Optional -from pydantic import Field, model_validator +from pydantic import Field, field_validator +from snowflake.cli.api.project.schemas.commons import Artifacts from snowflake.cli.api.project.schemas.entities.common import ( EntityModelBase, ExternalAccessBaseModel, @@ -25,6 +25,7 @@ from snowflake.cli.api.project.schemas.updatable_model import ( DiscriminatorField, ) +from snowflake.cli.api.project.schemas.v1.native_app.path_mapping import PathMapping class StreamlitEntityModel(EntityModelBase, ExternalAccessBaseModel, ImportsBaseModel): @@ -43,24 +44,20 @@ class StreamlitEntityModel(EntityModelBase, ExternalAccessBaseModel, ImportsBase stage: Optional[str] = Field( title="Stage in which the app’s artifacts will be stored", default="streamlit" ) - # Possibly can be PathMapping - artifacts: Optional[List[Path]] = Field( + artifacts: Optional[Artifacts] = Field( title="List of files which should be deployed. Each file needs to exist locally. " "Main file needs to be included in the artifacts.", default=None, ) - @model_validator(mode="after") - def artifacts_must_exists(self): - if not self.artifacts: - return self - - for artifact in self.artifacts: - if "*" in artifact.name: - continue - if not artifact.exists(): - raise ValueError( - f"Specified artifact {artifact} does not exist locally." - ) - - return self + @field_validator("artifacts") + @classmethod + def _convert_artifacts(cls, artifacts: Artifacts) -> Artifacts: + _artifacts = [] + for artifact in artifacts: + if isinstance(artifact, PathMapping): + path_mapping = artifact + else: + path_mapping = PathMapping(src=artifact) + _artifacts.append(path_mapping) + return _artifacts diff --git a/src/snowflake/cli/_plugins/streamlit/streamlit_project_paths.py b/src/snowflake/cli/_plugins/streamlit/streamlit_project_paths.py new file mode 100644 index 0000000000..fbe89959b2 --- /dev/null +++ b/src/snowflake/cli/_plugins/streamlit/streamlit_project_paths.py @@ -0,0 +1,25 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +from dataclasses import dataclass + +from snowflake.cli.api.project.project_paths import ProjectPaths + + +@dataclass +class StreamlitProjectPaths(ProjectPaths): + """ + This class allows you to manage files paths related to given project. 
+ """ diff --git a/src/snowflake/cli/api/feature_flags.py b/src/snowflake/cli/api/feature_flags.py index 2ed9728e55..b8383aa417 100644 --- a/src/snowflake/cli/api/feature_flags.py +++ b/src/snowflake/cli/api/feature_flags.py @@ -52,3 +52,4 @@ class FeatureFlag(FeatureFlagMixin): ENABLE_STREAMLIT_VERSIONED_STAGE = BooleanFlag( "ENABLE_STREAMLIT_VERSIONED_STAGE", False ) + ENABLE_SNOWPARK_GLOB_SUPPORT = BooleanFlag("ENABLE_SNOWPARK_GLOB_SUPPORT", False) diff --git a/src/snowflake/cli/api/project/definition_conversion.py b/src/snowflake/cli/api/project/definition_conversion.py index 1f76e998d2..71a6e10c47 100644 --- a/src/snowflake/cli/api/project/definition_conversion.py +++ b/src/snowflake/cli/api/project/definition_conversion.py @@ -223,10 +223,11 @@ def convert_streamlit_to_v2_data(streamlit: Streamlit) -> Dict[str, Any]: environment_file, pages_dir, ] - artifacts = [a for a in artifacts if a is not None] + artifacts = [str(a) for a in artifacts if a is not None] if streamlit.additional_source_files: - artifacts.extend(streamlit.additional_source_files) + for additional_file in streamlit.additional_source_files: + artifacts.append(str(additional_file)) identifier = {"name": streamlit.name} if streamlit.schema_name: diff --git a/src/snowflake/cli/api/project/project_paths.py b/src/snowflake/cli/api/project/project_paths.py new file mode 100644 index 0000000000..f6b2a14d99 --- /dev/null +++ b/src/snowflake/cli/api/project/project_paths.py @@ -0,0 +1,16 @@ +from dataclasses import dataclass +from pathlib import Path +from shutil import rmtree + + +@dataclass +class ProjectPaths: + project_root: Path + + @property + def deploy_root(self) -> Path: + return self.project_root / "output" + + def remove_up_deploy_root(self) -> None: + if self.deploy_root.exists(): + rmtree(self.deploy_root) diff --git a/src/snowflake/cli/api/project/schemas/commons.py b/src/snowflake/cli/api/project/schemas/commons.py new file mode 100644 index 0000000000..b512afbb39 --- /dev/null +++ b/src/snowflake/cli/api/project/schemas/commons.py @@ -0,0 +1,5 @@ +from typing import List, Union + +from snowflake.cli.api.project.schemas.v1.native_app.path_mapping import PathMapping + +Artifacts = List[Union[PathMapping, str]] diff --git a/src/snowflake/cli/api/project/schemas/v1/native_app/native_app.py b/src/snowflake/cli/api/project/schemas/v1/native_app/native_app.py index 2123dcacf1..d0c065c7f3 100644 --- a/src/snowflake/cli/api/project/schemas/v1/native_app/native_app.py +++ b/src/snowflake/cli/api/project/schemas/v1/native_app/native_app.py @@ -15,9 +15,10 @@ from __future__ import annotations import re -from typing import List, Optional, Union +from typing import List, Optional from pydantic import Field, field_validator +from snowflake.cli.api.project.schemas.commons import Artifacts from snowflake.cli.api.project.schemas.updatable_model import UpdatableModel from snowflake.cli.api.project.schemas.v1.native_app.application import ( Application, @@ -34,7 +35,7 @@ class NativeApp(UpdatableModel): name: str = Field( title="Project identifier", ) - artifacts: List[Union[PathMapping, str]] = Field( + artifacts: Artifacts = Field( title="List of file source and destination pairs to add to the deploy root", ) bundle_root: Optional[str] = Field( @@ -69,10 +70,8 @@ def validate_source_stage(cls, input_value: str): @field_validator("artifacts") @classmethod - def transform_artifacts( - cls, orig_artifacts: List[Union[PathMapping, str]] - ) -> List[PathMapping]: - transformed_artifacts = [] + def transform_artifacts(cls, 
orig_artifacts: Artifacts) -> List[PathMapping]: + transformed_artifacts: List[PathMapping] = [] if orig_artifacts is None: return transformed_artifacts diff --git a/tests/helpers/__snapshots__/test_v1_to_v2.ambr b/tests/helpers/__snapshots__/test_v1_to_v2.ambr index 2e7458624b..d1e6e41d71 100644 --- a/tests/helpers/__snapshots__/test_v1_to_v2.ambr +++ b/tests/helpers/__snapshots__/test_v1_to_v2.ambr @@ -115,10 +115,10 @@ pages_dir: pages stage: artifacts: - - streamlit_app.py - - environment.yml - - pages - - common/hello.py + - src: streamlit_app.py + - src: environment.yml + - src: pages + - src: common/hello.py env: streamlit_title: My Fancy Streamlit @@ -388,7 +388,7 @@ type: string stage: dev_deployment artifacts: - - src: app + - src: app/ dest: my_snowpark_project type: procedure execute_as_caller: false @@ -412,7 +412,7 @@ runtime: '3.10' stage: dev_deployment artifacts: - - src: app + - src: app/ dest: my_snowpark_project type: function test_streamlit: @@ -425,9 +425,9 @@ pages_dir: None stage: streamlit artifacts: - - streamlit_app.py - - environment.yml - - pages + - src: streamlit_app.py + - src: environment.yml + - src: pages pkg: meta: role: pkg_role diff --git a/tests/project/test_project_definition_v2.py b/tests/project/test_project_definition_v2.py index f54497ae74..e639f41cfa 100644 --- a/tests/project/test_project_definition_v2.py +++ b/tests/project/test_project_definition_v2.py @@ -375,7 +375,7 @@ def test_v1_to_v2_conversion( ) artifact = PathMapping( - src=Path(definition_v1.snowpark.src), + src=definition_v1.snowpark.src, dest=definition_v1.snowpark.project_name, ) for v1_procedure in definition_v1.snowpark.procedures: diff --git a/tests/snowpark/__snapshots__/test_function_old_build.ambr b/tests/snowpark/__snapshots__/test_function_old_build.ambr new file mode 100644 index 0000000000..edde16c81f --- /dev/null +++ b/tests/snowpark/__snapshots__/test_function_old_build.ambr @@ -0,0 +1,62 @@ +# serializer version: 1 +# name: test_deploy_function_fully_qualified_name[ok] + ''' + Performing initial validation + Checking remote state + Preparing required stages and artifacts + Creating (if not exists) stage: dev_deployment + Uploading app.zip to @MockDatabase.MockSchema.dev_deployment/my_snowpark_project/ + Creating Snowpark entities + Creating function custom_db.custom_schema.fqn_function + Creating function custom_schema.fqn_function_only_schema + Creating function custom_schema.schema_function + Creating function custom_db.PUBLIC.database_function + Creating function custom_db.custom_schema.database_function + Creating function custom_database.custom_schema.fqn_function3 + +------------------------------------------------------------------------------+ + | object | type | status | + |---------------------------------------------------------+----------+---------| + | custom_db.custom_schema.fqn_function(name string) | function | created | + | MockDatabase.custom_schema.fqn_function_only_schema(nam | function | created | + | e string) | | | + | MockDatabase.custom_schema.schema_function(name string) | function | created | + | custom_db.MockSchema.database_function(name string) | function | created | + | custom_db.custom_schema.database_function(name string) | function | created | + | custom_database.custom_schema.fqn_function3(name | function | created | + | string) | | | + +------------------------------------------------------------------------------+ + + ''' +# --- +# name: test_deploy_function_fully_qualified_name_duplicated_database[database error] + ''' + 
Performing initial validation + Checking remote state + +- Error ----------------------------------------------------------------------+ + | Database provided but name | + | 'custom_database.custom_schema.fqn_function_error' is fully qualified name. | + +------------------------------------------------------------------------------+ + + ''' +# --- +# name: test_deploy_function_fully_qualified_name_duplicated_schema[schema error] + ''' + Performing initial validation + Checking remote state + +- Error ----------------------------------------------------------------------+ + | Schema provided but name 'custom_schema.fqn_function_error' is fully | + | qualified name. | + +------------------------------------------------------------------------------+ + + ''' +# --- +# name: test_deploy_function_secrets_without_external_access + ''' + Performing initial validation + Checking remote state + +- Error ----------------------------------------------------------------------+ + | func1 defined with secrets but without external integration. | + +------------------------------------------------------------------------------+ + + ''' +# --- diff --git a/tests/snowpark/__snapshots__/test_models.ambr b/tests/snowpark/__snapshots__/test_models.ambr new file mode 100644 index 0000000000..9564dc9303 --- /dev/null +++ b/tests/snowpark/__snapshots__/test_models.ambr @@ -0,0 +1,14 @@ +# serializer version: 1 +# name: test_raise_error_when_artifact_contains_asterix + ''' + +- Error ----------------------------------------------------------------------+ + | During evaluation of DefinitionV20 in project definition following errors | + | were encountered: | + | For field entities.hello_procedure.procedure.artifacts you provided | + | '['src/*']'. This caused: Value error, If you want to use glob patterns in | + | artifacts, you need to enable the Snowpark new build feature flag | + | (ENABLE_SNOWPARK_GLOB_SUPPORT=true) | + +------------------------------------------------------------------------------+ + + ''' +# --- diff --git a/tests/snowpark/__snapshots__/test_procedure_old_build.ambr b/tests/snowpark/__snapshots__/test_procedure_old_build.ambr new file mode 100644 index 0000000000..aab627199a --- /dev/null +++ b/tests/snowpark/__snapshots__/test_procedure_old_build.ambr @@ -0,0 +1,56 @@ +# serializer version: 1 +# name: test_deploy_procedure_fails_if_integration_does_not_exists + ''' + Performing initial validation + Checking remote state + +- Error ----------------------------------------------------------------------+ + | Following external access integration does not exists in Snowflake: | + | external_2 | + +------------------------------------------------------------------------------+ + + ''' +# --- +# name: test_deploy_procedure_fails_if_object_exists_and_no_replace + ''' + Performing initial validation + Checking remote state + +- Error ----------------------------------------------------------------------+ + | Following objects already exists. Consider using --replace. | + | procedure: procedureName | + | procedure: test | + +------------------------------------------------------------------------------+ + + ''' +# --- +# name: test_deploy_procedure_fully_qualified_name[database error] + ''' + Performing initial validation + Checking remote state + +- Error ----------------------------------------------------------------------+ + | Database provided but name | + | 'custom_database.custom_schema.fqn_procedure_error' is fully qualified name. 
| + +------------------------------------------------------------------------------+ + + ''' +# --- +# name: test_deploy_procedure_fully_qualified_name_duplicated_schema[schema error] + ''' + Performing initial validation + Checking remote state + +- Error ----------------------------------------------------------------------+ + | Schema provided but name 'custom_schema.fqn_procedure_error' is fully | + | qualified name. | + +------------------------------------------------------------------------------+ + + ''' +# --- +# name: test_deploy_procedure_secrets_without_external_access + ''' + Performing initial validation + Checking remote state + +- Error ----------------------------------------------------------------------+ + | procedureName defined with secrets but without external integration. | + +------------------------------------------------------------------------------+ + + ''' +# --- diff --git a/tests/snowpark/test_artifacts.py b/tests/snowpark/test_artifacts.py new file mode 100644 index 0000000000..c573ab9b6d --- /dev/null +++ b/tests/snowpark/test_artifacts.py @@ -0,0 +1,215 @@ +import os +from pathlib import Path +from unittest import mock + +import pytest +from snowflake.cli.api.errno import DOES_NOT_EXIST_OR_NOT_AUTHORIZED +from snowflake.connector import ProgrammingError +from snowflake.connector.compat import IS_WINDOWS + +mock_session_has_warehouse = mock.patch( + "snowflake.cli.api.sql_execution.SqlExecutionMixin.session_has_warehouse", + lambda _: True, +) + + +@pytest.mark.parametrize( + "artifacts, local_path, stage_path", + [ + ("src", Path("output") / "src.zip", "/"), + ("src/", Path("output") / "src.zip", "/"), + ("src/*", Path("output") / "src.zip", "/"), + ("src/*.py", Path("output") / "src.zip", "/"), + ( + "src/dir/dir_app.py", + Path("output") / "src" / "dir" / "dir_app.py", + "/src/dir/", + ), + ( + {"src": "src/**/*", "dest": "source/"}, + Path("output") / "source" / "src.zip", + "/source/", + ), + ( + {"src": "src", "dest": "source/"}, + Path("output") / "source" / "src.zip", + "/source/", + ), + ( + {"src": "src/", "dest": "source/"}, + Path("output") / "source" / "src.zip", + "/source/", + ), + ( + {"src": "src/*", "dest": "source/"}, + Path("output") / "source" / "src.zip", + "/source/", + ), + ( + {"src": "src/dir/dir_app.py", "dest": "source/dir/apps/"}, + "output/source/dir/apps/dir_app.py", + "/source/dir/apps/", + ), + ], +) +@mock.patch("snowflake.connector.connect") +@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.describe") +@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.show") +@mock.patch("snowflake.cli._plugins.snowpark.commands.StageManager.put") +@mock_session_has_warehouse +def test_build_and_deploy_with_artifacts( + mock_sm_put, + mock_om_show, + mock_om_describe, + mock_conn, + runner, + mock_ctx, + project_directory, + alter_snowflake_yml, + artifacts, + local_path, + stage_path, + enable_snowpark_glob_support_feature_flag, +): + mock_om_describe.side_effect = ProgrammingError( + errno=DOES_NOT_EXIST_OR_NOT_AUTHORIZED + ) + ctx = mock_ctx() + mock_conn.return_value = ctx + + with project_directory("glob_patterns") as tmp: + alter_snowflake_yml( + tmp / "snowflake.yml", "entities.hello_procedure.artifacts", [artifacts] + ) + + result = runner.invoke( + [ + "snowpark", + "build", + ] + ) + assert result.exit_code == 0, result.output + + result = runner.invoke( + [ + "snowpark", + "deploy", + ] + ) + assert result.exit_code == 0, result.output + # Windows needs absolute paths. 
+ if IS_WINDOWS: + tmp_path = tmp.absolute() + else: + tmp_path = tmp.resolve() + assert { + "local_path": tmp_path / local_path, + "stage_path": "@MockDatabase.MockSchema.dev_deployment" + stage_path, + } in _extract_put_calls(mock_sm_put) + + +@pytest.mark.parametrize( + "artifact, local_path, stage_path", + [ + ("src", Path("output") / "src.zip", "/"), + ("src/", Path("output") / "src.zip", "/"), + ("src/*", Path("output") / "src.zip", "/"), + ("src/*.py", Path("output") / "src.zip", "/"), + ( + "src/dir/dir_app.py", + Path("output") / "src" / "dir" / "dir_app.py", + "/src/dir/", + ), + ( + {"src": "src/**/*", "dest": "source/"}, + Path("output") / "source" / "src.zip", + "/source/", + ), + ( + {"src": "src", "dest": "source/"}, + Path("output") / "source" / "src.zip", + "/source/", + ), + ( + {"src": "src/", "dest": "source/"}, + Path("output") / "source" / "src.zip", + "/source/", + ), + ( + {"src": "src/*", "dest": "source/"}, + Path("output") / "source" / "src.zip", + "/source/", + ), + ( + {"src": "src/dir/dir_app.py", "dest": "source/dir/apps/"}, + Path("output") / "source" / "dir" / "apps" / "dir_app.py", + "/source/dir/apps/", + ), + ], +) +@mock.patch("snowflake.connector.connect") +@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.describe") +@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.show") +@mock.patch("snowflake.cli._plugins.snowpark.commands.StageManager.put") +@mock_session_has_warehouse +def test_build_and_deploy_with_artifacts_run_from_other_directory( + mock_sm_put, + mock_om_show, + mock_om_describe, + mock_conn, + runner, + mock_ctx, + project_directory, + alter_snowflake_yml, + artifact, + local_path, + stage_path, + enable_snowpark_glob_support_feature_flag, +): + mock_om_describe.side_effect = ProgrammingError( + errno=DOES_NOT_EXIST_OR_NOT_AUTHORIZED + ) + ctx = mock_ctx() + mock_conn.return_value = ctx + + with project_directory("glob_patterns") as tmp: + os.chdir(Path(os.getcwd()).parent) + alter_snowflake_yml( + tmp / "snowflake.yml", "entities.hello_procedure.artifacts", [artifact] + ) + + result = runner.invoke( + [ + "snowpark", + "build", + "-p", + tmp, + ] + ) + assert result.exit_code == 0, result.output + + result = runner.invoke( + [ + "snowpark", + "deploy", + "-p", + tmp, + ] + ) + assert result.exit_code == 0, result.output + assert { + "local_path": tmp / local_path, + "stage_path": "@MockDatabase.MockSchema.dev_deployment" + stage_path, + } in _extract_put_calls(mock_sm_put) + + +def _extract_put_calls(mock_sm_put): + # Extract the put calls from the mock for better visibility in test logs + return [ + { + "local_path": call.kwargs.get("local_path"), + "stage_path": call.kwargs.get("stage_path"), + } + for call in mock_sm_put.mock_calls + if call.kwargs.get("local_path") + ] diff --git a/tests/snowpark/test_build.py b/tests/snowpark/test_build.py index 008a832dd1..9298ed2b32 100644 --- a/tests/snowpark/test_build.py +++ b/tests/snowpark/test_build.py @@ -11,9 +11,11 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
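The local_path expectations in the parametrized cases above reduce to the naming rule of Artefact._artefact_name in snowpark_project_paths.py. A simplified standalone sketch of that rule (the real property also special-cases single-file sources with a file-like dest and falls back to a common-path computation for fully wildcarded sources):

    import glob
    from pathlib import Path

    def artefact_zip_name(path: Path) -> str:
        # For a glob source, the last component before the first wildcard
        # names the zip; plain directories map to "<name>.zip".
        if glob.has_magic(str(path)):
            last_part = None
            for part in path.parts:
                if glob.has_magic(part):
                    break
                last_part = part
            return (last_part or path.parts[0]) + ".zip"
        return path.stem + ".zip"

    assert artefact_zip_name(Path("src/*")) == "src.zip"
    assert artefact_zip_name(Path("src/**/*.py")) == "src.zip"
    assert artefact_zip_name(Path("src")) == "src.zip"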
- +from typing import Set from unittest.mock import patch +from zipfile import ZipFile +import pytest from snowflake.cli._plugins.snowpark.package_utils import ( DownloadUnavailablePackagesResult, ) @@ -28,3 +30,37 @@ def test_snowpark_build_no_deprecated_warnings_by_default( result = runner.invoke(["snowpark", "build", "--ignore-anaconda"]) assert result.exit_code == 0, result.output assert "flag is deprecated" not in result.output + + +@pytest.mark.parametrize( + "artifacts, zip_name, expected_files", + [ + ("src", "src.zip", {"app.py", "dir/dir_app.py"}), + ("src/", "src.zip", {"app.py", "dir/dir_app.py"}), + ("src/*", "src.zip", {"app.py", "dir/dir_app.py"}), + ("src/*.py", "src.zip", {"app.py"}), + ("src/**/*.py", "src.zip", {"app.py", "dir/dir_app.py"}), + ], +) +def test_build_with_glob_patterns_in_artifacts( + runner, + enable_snowpark_glob_support_feature_flag, + project_directory, + alter_snowflake_yml, + artifacts, + zip_name, + expected_files, +): + with project_directory("glob_patterns") as tmp_dir: + alter_snowflake_yml( + tmp_dir / "snowflake.yml", "entities.hello_procedure.artifacts", [artifacts] + ) + + result = runner.invoke(["snowpark", "build", "--ignore-anaconda"]) + assert result.exit_code == 0, result.output + _assert_zip_contains(tmp_dir / "output" / zip_name, expected_files) + + +def _assert_zip_contains(app_zip: str, expected_files: Set[str]): + zip_file = ZipFile(app_zip) + assert set(zip_file.namelist()) == expected_files diff --git a/tests/snowpark/test_function.py b/tests/snowpark/test_function.py index a6559e6bd7..7ee4a27072 100644 --- a/tests/snowpark/test_function.py +++ b/tests/snowpark/test_function.py @@ -18,6 +18,9 @@ from unittest import mock import pytest +from snowflake.cli._plugins.snowpark.package_utils import ( + DownloadUnavailablePackagesResult, +) from snowflake.cli.api.errno import DOES_NOT_EXIST_OR_NOT_AUTHORIZED from snowflake.connector import ProgrammingError @@ -138,6 +141,7 @@ def test_deploy_function_secrets_without_external_access( mock_ctx, project_directory, os_agnostic_snapshot, + enable_snowpark_glob_support_feature_flag, ): mock_object_manager.return_value.show.return_value = [ {"name": "external_1", "type": "EXTERNAL_ACCESS"}, @@ -147,6 +151,15 @@ def test_deploy_function_secrets_without_external_access( mock_conn.return_value = ctx with project_directory("snowpark_function_secrets_without_external_access"): + result = runner.invoke( + [ + "snowpark", + "build", + "--ignore-anaconda", + ], + catch_exceptions=False, + ) + assert result.exit_code == 0, result.output result = runner.invoke( [ "snowpark", @@ -159,14 +172,20 @@ def test_deploy_function_secrets_without_external_access( @mock.patch("snowflake.connector.connect") +@mock.patch( + "snowflake.cli._plugins.snowpark.package_utils.download_unavailable_packages" +) @mock_session_has_warehouse def test_deploy_function_no_changes( + mock_download, mock_connector, runner, mock_ctx, mock_cursor, project_directory, + enable_snowpark_glob_support_feature_flag, ): + mock_download.return_value = DownloadUnavailablePackagesResult() rows = [ ("packages", '["foo==1.2.3", "bar>=3.0.0"]'), ("handler", "app.func1_handler"), @@ -195,19 +214,25 @@ def test_deploy_function_no_changes( ] assert queries == [ "create stage if not exists IDENTIFIER('MockDatabase.MockSchema.dev_deployment') comment='deployments managed by Snowflake CLI'", - f"put file://{Path(project_dir).resolve()}/app.py @MockDatabase.MockSchema.dev_deployment/my_snowpark_project/ auto_compress=false parallel=4 overwrite=True", + 
f"put file://{Path(project_dir).resolve()}/output/my_snowpark_project/app.py @MockDatabase.MockSchema.dev_deployment/my_snowpark_project/ auto_compress=false parallel=4 overwrite=True", ] @mock.patch("snowflake.connector.connect") +@mock.patch( + "snowflake.cli._plugins.snowpark.package_utils.download_unavailable_packages" +) @mock_session_has_warehouse def test_deploy_function_needs_update_because_packages_changes( + mock_download, mock_connector, runner, mock_ctx, mock_cursor, project_directory, + enable_snowpark_glob_support_feature_flag, ): + mock_download.return_value = DownloadUnavailablePackagesResult() rows = [ ("packages", '["foo==1.2.3"]'), ("handler", "main.py:app"), @@ -234,7 +259,7 @@ def test_deploy_function_needs_update_because_packages_changes( ] assert queries == [ "create stage if not exists IDENTIFIER('MockDatabase.MockSchema.dev_deployment') comment='deployments managed by Snowflake CLI'", - f"put file://{Path(project_dir).resolve()}/app.py @MockDatabase.MockSchema.dev_deployment/my_snowpark_project/ auto_compress=false parallel=4 overwrite=True", + f"put file://{Path(project_dir).resolve()}/output/my_snowpark_project/app.py @MockDatabase.MockSchema.dev_deployment/my_snowpark_project/ auto_compress=false parallel=4 overwrite=True", dedent( """\ create or replace function IDENTIFIER('MockDatabase.MockSchema.func1')(a string default 'default value', b variant) @@ -251,14 +276,20 @@ def test_deploy_function_needs_update_because_packages_changes( @mock.patch("snowflake.connector.connect") +@mock.patch( + "snowflake.cli._plugins.snowpark.package_utils.download_unavailable_packages" +) @mock_session_has_warehouse def test_deploy_function_needs_update_because_handler_changes( + mock_download, mock_connector, runner, mock_ctx, mock_cursor, project_directory, + enable_snowpark_glob_support_feature_flag, ): + mock_download.return_value = DownloadUnavailablePackagesResult() rows = [ ("packages", '["foo==1.2.3", "bar>=3.0.0"]'), ("handler", "main.py:oldApp"), @@ -285,7 +316,7 @@ def test_deploy_function_needs_update_because_handler_changes( ] assert queries == [ "create stage if not exists IDENTIFIER('MockDatabase.MockSchema.dev_deployment') comment='deployments managed by Snowflake CLI'", - f"put file://{Path(project_dir).resolve()}/app.py @MockDatabase.MockSchema.dev_deployment/my_snowpark_project/" + f"put file://{Path(project_dir).resolve()}/output/my_snowpark_project/app.py @MockDatabase.MockSchema.dev_deployment/my_snowpark_project/" f" auto_compress=false parallel=4 overwrite=True", dedent( """\ @@ -305,8 +336,12 @@ def test_deploy_function_needs_update_because_handler_changes( @mock.patch("snowflake.connector.connect") @mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.describe") @mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.show") +@mock.patch( + "snowflake.cli._plugins.snowpark.package_utils.download_unavailable_packages" +) @mock_session_has_warehouse def test_deploy_function_fully_qualified_name_duplicated_database( + mock_download, mock_om_show, mock_om_describe, mock_conn, @@ -315,7 +350,9 @@ def test_deploy_function_fully_qualified_name_duplicated_database( project_directory, alter_snowflake_yml, os_agnostic_snapshot, + enable_snowpark_glob_support_feature_flag, ): + mock_download.return_value = DownloadUnavailablePackagesResult() number_of_functions_in_project = 6 mock_om_describe.side_effect = [ ProgrammingError(errno=DOES_NOT_EXIST_OR_NOT_AUTHORIZED), @@ -323,7 +360,16 @@ def 
test_deploy_function_fully_qualified_name_duplicated_database( ctx = mock_ctx() mock_conn.return_value = ctx - with project_directory("snowpark_function_fully_qualified_name") as tmp_dir: + with project_directory("snowpark_function_fully_qualified_name"): + result = runner.invoke( + [ + "snowpark", + "build", + "--ignore-anaconda", + ], + catch_exceptions=False, + ) + assert result.exit_code == 0, result.output result = runner.invoke(["snowpark", "deploy"]) assert result.output == os_agnostic_snapshot(name="database error") @@ -331,8 +377,12 @@ def test_deploy_function_fully_qualified_name_duplicated_database( @mock.patch("snowflake.connector.connect") @mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.describe") @mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.show") +@mock.patch( + "snowflake.cli._plugins.snowpark.package_utils.download_unavailable_packages" +) @mock_session_has_warehouse def test_deploy_function_fully_qualified_name_duplicated_schema( + mock_download, mock_om_show, mock_om_describe, mock_conn, @@ -341,7 +391,9 @@ def test_deploy_function_fully_qualified_name_duplicated_schema( project_directory, alter_snowflake_yml, os_agnostic_snapshot, + enable_snowpark_glob_support_feature_flag, ): + mock_download.return_value = DownloadUnavailablePackagesResult() number_of_functions_in_project = 6 mock_om_describe.side_effect = [ ProgrammingError(errno=DOES_NOT_EXIST_OR_NOT_AUTHORIZED), @@ -355,6 +407,15 @@ def test_deploy_function_fully_qualified_name_duplicated_schema( parameter_path="snowpark.functions.5.name", value="custom_schema.fqn_function_error", ) + result = runner.invoke( + [ + "snowpark", + "build", + "--ignore-anaconda", + ], + catch_exceptions=False, + ) + assert result.exit_code == 0, result.output result = runner.invoke(["snowpark", "deploy"]) assert result.output == os_agnostic_snapshot(name="schema error") @@ -362,8 +423,12 @@ def test_deploy_function_fully_qualified_name_duplicated_schema( @mock.patch("snowflake.connector.connect") @mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.describe") @mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.show") +@mock.patch( + "snowflake.cli._plugins.snowpark.package_utils.download_unavailable_packages" +) @mock_session_has_warehouse def test_deploy_function_fully_qualified_name( + mock_download, mock_om_show, mock_om_describe, mock_conn, @@ -372,7 +437,9 @@ def test_deploy_function_fully_qualified_name( project_directory, alter_snowflake_yml, os_agnostic_snapshot, + enable_snowpark_glob_support_feature_flag, ): + mock_download.return_value = DownloadUnavailablePackagesResult() number_of_functions_in_project = 6 mock_om_describe.side_effect = [ ProgrammingError(errno=DOES_NOT_EXIST_OR_NOT_AUTHORIZED), @@ -386,6 +453,15 @@ def test_deploy_function_fully_qualified_name( parameter_path="snowpark.functions.5.name", value="fqn_function3", ) + result = runner.invoke( + [ + "snowpark", + "build", + "--ignore-anaconda", + ], + catch_exceptions=False, + ) + assert result.exit_code == 0, result.output result = runner.invoke(["snowpark", "deploy"]) assert result.exit_code == 0 assert result.output == os_agnostic_snapshot(name="ok") @@ -403,8 +479,12 @@ def test_deploy_function_fully_qualified_name( ) @mock.patch("snowflake.connector.connect") @mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager") +@mock.patch( + "snowflake.cli._plugins.snowpark.package_utils.download_unavailable_packages" +) @mock_session_has_warehouse def 
test_deploy_function_with_empty_default_value( + mock_download, mock_object_manager, mock_connector, mock_ctx, @@ -413,7 +493,9 @@ def test_deploy_function_with_empty_default_value( alter_snowflake_yml, parameter_type, default_value, + enable_snowpark_glob_support_feature_flag, ): + mock_download.return_value = DownloadUnavailablePackagesResult() mock_object_manager.return_value.describe.side_effect = ProgrammingError( errno=DOES_NOT_EXIST_OR_NOT_AUTHORIZED ) @@ -432,6 +514,15 @@ def test_deploy_function_with_empty_default_value( parameter_path=f"snowpark.functions.0.runtime", value="3.10", ) + result = runner.invoke( + [ + "snowpark", + "build", + "--ignore-anaconda", + ], + catch_exceptions=False, + ) + assert result.exit_code == 0, result.output result = runner.invoke( ["snowpark", "deploy", "--format", "json"], catch_exceptions=False ) @@ -493,6 +584,15 @@ def _deploy_function( (Path(temp_dir) / "requirements.snowflake.txt").write_text( "foo==1.2.3\nbar>=3.0.0" ) + result = runner.invoke( + [ + "snowpark", + "build", + "--ignore-anaconda", + ], + catch_exceptions=False, + ) + assert result.exit_code == 0, result.output result = runner.invoke( [ "snowpark", diff --git a/tests/snowpark/test_function_old_build.py b/tests/snowpark/test_function_old_build.py new file mode 100644 index 0000000000..2a78955d82 --- /dev/null +++ b/tests/snowpark/test_function_old_build.py @@ -0,0 +1,506 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
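These tests pin the legacy build path, i.e. the behaviour with ENABLE_SNOWPARK_GLOB_SUPPORT left off (hence no snowpark build step before deploy). A rough sketch of the gate, using the is_enabled/is_disabled API exercised elsewhere in this diff; how the flag is toggled in a user's configuration is not shown here:

    from snowflake.cli.api.feature_flags import FeatureFlag

    if FeatureFlag.ENABLE_SNOWPARK_GLOB_SUPPORT.is_enabled():
        ...  # new path: glob-aware, BundleMap-based build into <project_root>/output
    else:
        ...  # legacy path: ArtefactOldBuild zipping, as exercised below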
+ +import json +from pathlib import Path +from textwrap import dedent +from unittest import mock + +import pytest +from snowflake.cli.api.errno import DOES_NOT_EXIST_OR_NOT_AUTHORIZED +from snowflake.connector import ProgrammingError + +from tests_common import IS_WINDOWS + +if IS_WINDOWS: + pytest.skip("Requires further refactor to work on Windows", allow_module_level=True) + + +mock_session_has_warehouse = mock.patch( + "snowflake.cli.api.sql_execution.SqlExecutionMixin.session_has_warehouse", + lambda _: True, +) + + +@mock.patch("snowflake.connector.connect") +@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager") +@mock_session_has_warehouse +def test_deploy_function( + mock_object_manager, + mock_connector, + mock_ctx, + runner, + project_directory, +): + mock_object_manager.return_value.describe.side_effect = ProgrammingError( + errno=DOES_NOT_EXIST_OR_NOT_AUTHORIZED + ) + ctx = mock_ctx() + mock_connector.return_value = ctx + with project_directory("snowpark_functions") as project_dir: + result = runner.invoke( + [ + "snowpark", + "deploy", + ], + catch_exceptions=False, + ) + + assert result.exit_code == 0, result.output + assert ctx.get_queries() == [ + "create stage if not exists IDENTIFIER('MockDatabase.MockSchema.dev_deployment') comment='deployments managed by Snowflake CLI'", + f"put file://{Path(project_dir).resolve()}/app.py @MockDatabase.MockSchema.dev_deployment/my_snowpark_project/" + f" auto_compress=false parallel=4 overwrite=True", + dedent( + """\ + create or replace function IDENTIFIER('MockDatabase.MockSchema.func1')(a string default 'default value', b variant) + copy grants + returns string + language python + runtime_version=3.10 + imports=('@MockDatabase.MockSchema.dev_deployment/my_snowpark_project/app.py') + handler='app.func1_handler' + packages=() + """ + ).strip(), + ] + + +@mock.patch("snowflake.connector.connect") +@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager") +@mock_session_has_warehouse +def test_deploy_function_with_external_access( + mock_object_manager, + mock_connector, + mock_ctx, + runner, + project_directory, +): + mock_object_manager.return_value.show.return_value = [ + {"name": "external_1", "type": "EXTERNAL_ACCESS"}, + {"name": "external_2", "type": "EXTERNAL_ACCESS"}, + ] + mock_object_manager.return_value.describe.side_effect = ProgrammingError( + errno=DOES_NOT_EXIST_OR_NOT_AUTHORIZED + ) + ctx = mock_ctx() + mock_connector.return_value = ctx + + with project_directory("snowpark_function_external_access") as project_dir: + result = runner.invoke( + [ + "snowpark", + "deploy", + ], + catch_exceptions=False, + ) + + assert result.exit_code == 0, result.output + assert ctx.get_queries() == [ + "create stage if not exists IDENTIFIER('MockDatabase.MockSchema.dev_deployment') comment='deployments managed by Snowflake CLI'", + f"put file://{Path(project_dir).resolve()}/app.py @MockDatabase.MockSchema.dev_deployment/my_snowpark_project/" + f" auto_compress=false parallel=4 overwrite=True", + dedent( + """\ + create or replace function IDENTIFIER('MockDatabase.MockSchema.func1')(a string, b variant) + copy grants + returns string + language python + runtime_version=3.10 + imports=('@MockDatabase.MockSchema.dev_deployment/my_snowpark_project/app.py') + handler='app.func1_handler' + packages=() + external_access_integrations=(external_1, external_2) + secrets=('cred'=cred_name, 'other'=other_name) + """ + ).strip(), + ] + + +@mock.patch("snowflake.connector.connect") 
+@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager") +@mock_session_has_warehouse +def test_deploy_function_secrets_without_external_access( + mock_object_manager, + mock_conn, + runner, + mock_ctx, + project_directory, + os_agnostic_snapshot, +): + mock_object_manager.return_value.show.return_value = [ + {"name": "external_1", "type": "EXTERNAL_ACCESS"}, + {"name": "external_2", "type": "EXTERNAL_ACCESS"}, + ] + ctx = mock_ctx() + mock_conn.return_value = ctx + + with project_directory("snowpark_function_secrets_without_external_access"): + result = runner.invoke( + [ + "snowpark", + "deploy", + ], + ) + + assert result.exit_code == 1, result.output + assert result.output == os_agnostic_snapshot + + +@mock.patch("snowflake.connector.connect") +@mock_session_has_warehouse +def test_deploy_function_no_changes( + mock_connector, + runner, + mock_ctx, + mock_cursor, + project_directory, +): + rows = [ + ("packages", '["foo==1.2.3", "bar>=3.0.0"]'), + ("handler", "app.func1_handler"), + ("returns", "string"), + ("imports", "dev_deployment/my_snowpark_project/app.py"), + ("runtime_version", "3.10"), + ] + + queries, result, project_dir = _deploy_function( + rows, + mock_connector, + runner, + mock_ctx, + mock_cursor, + project_directory, + "--replace", + ) + + assert result.exit_code == 0, result.output + assert json.loads(result.output) == [ + { + "object": "MockDatabase.MockSchema.func1(a string default 'default value', b variant)", + "status": "packages updated", + "type": "function", + } + ] + assert queries == [ + "create stage if not exists IDENTIFIER('MockDatabase.MockSchema.dev_deployment') comment='deployments managed by Snowflake CLI'", + f"put file://{Path(project_dir).resolve()}/app.py @MockDatabase.MockSchema.dev_deployment/my_snowpark_project/ auto_compress=false parallel=4 overwrite=True", + ] + + +@mock.patch("snowflake.connector.connect") +@mock_session_has_warehouse +def test_deploy_function_needs_update_because_packages_changes( + mock_connector, + runner, + mock_ctx, + mock_cursor, + project_directory, +): + rows = [ + ("packages", '["foo==1.2.3"]'), + ("handler", "main.py:app"), + ("returns", "table(variant)"), + ] + + queries, result, project_dir = _deploy_function( + rows, + mock_connector, + runner, + mock_ctx, + mock_cursor, + project_directory, + "--replace", + ) + + assert result.exit_code == 0, result.output + assert json.loads(result.output) == [ + { + "object": "MockDatabase.MockSchema.func1(a string default 'default value', b variant)", + "status": "definition updated", + "type": "function", + } + ] + assert queries == [ + "create stage if not exists IDENTIFIER('MockDatabase.MockSchema.dev_deployment') comment='deployments managed by Snowflake CLI'", + f"put file://{Path(project_dir).resolve()}/app.py @MockDatabase.MockSchema.dev_deployment/my_snowpark_project/ auto_compress=false parallel=4 overwrite=True", + dedent( + """\ + create or replace function IDENTIFIER('MockDatabase.MockSchema.func1')(a string default 'default value', b variant) + copy grants + returns string + language python + runtime_version=3.10 + imports=('@MockDatabase.MockSchema.dev_deployment/my_snowpark_project/app.py') + handler='app.func1_handler' + packages=('foo==1.2.3','bar>=3.0.0') + """ + ).strip(), + ] + + +@mock.patch("snowflake.connector.connect") +@mock_session_has_warehouse +def test_deploy_function_needs_update_because_handler_changes( + mock_connector, + runner, + mock_ctx, + mock_cursor, + project_directory, +): + rows = [ + ("packages", '["foo==1.2.3", 
"bar>=3.0.0"]'), + ("handler", "main.py:oldApp"), + ("returns", "table(variant)"), + ] + + queries, result, project_dir = _deploy_function( + rows, + mock_connector, + runner, + mock_ctx, + mock_cursor, + project_directory, + "--replace", + ) + + assert result.exit_code == 0, result.output + assert json.loads(result.output) == [ + { + "object": "MockDatabase.MockSchema.func1(a string default 'default value', b variant)", + "status": "definition updated", + "type": "function", + } + ] + assert queries == [ + "create stage if not exists IDENTIFIER('MockDatabase.MockSchema.dev_deployment') comment='deployments managed by Snowflake CLI'", + f"put file://{Path(project_dir).resolve()}/app.py @MockDatabase.MockSchema.dev_deployment/my_snowpark_project/" + f" auto_compress=false parallel=4 overwrite=True", + dedent( + """\ + create or replace function IDENTIFIER('MockDatabase.MockSchema.func1')(a string default 'default value', b variant) + copy grants + returns string + language python + runtime_version=3.10 + imports=('@MockDatabase.MockSchema.dev_deployment/my_snowpark_project/app.py') + handler='app.func1_handler' + packages=('foo==1.2.3','bar>=3.0.0') + """ + ).strip(), + ] + + +@mock.patch("snowflake.connector.connect") +@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.describe") +@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.show") +@mock_session_has_warehouse +def test_deploy_function_fully_qualified_name_duplicated_database( + mock_om_show, + mock_om_describe, + mock_conn, + runner, + mock_ctx, + project_directory, + alter_snowflake_yml, + os_agnostic_snapshot, +): + number_of_functions_in_project = 6 + mock_om_describe.side_effect = [ + ProgrammingError(errno=DOES_NOT_EXIST_OR_NOT_AUTHORIZED), + ] * number_of_functions_in_project + ctx = mock_ctx() + mock_conn.return_value = ctx + + with project_directory("snowpark_function_fully_qualified_name") as tmp_dir: + result = runner.invoke(["snowpark", "deploy"]) + assert result.output == os_agnostic_snapshot(name="database error") + + +@mock.patch("snowflake.connector.connect") +@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.describe") +@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.show") +@mock_session_has_warehouse +def test_deploy_function_fully_qualified_name_duplicated_schema( + mock_om_show, + mock_om_describe, + mock_conn, + runner, + mock_ctx, + project_directory, + alter_snowflake_yml, + os_agnostic_snapshot, +): + number_of_functions_in_project = 6 + mock_om_describe.side_effect = [ + ProgrammingError(errno=DOES_NOT_EXIST_OR_NOT_AUTHORIZED), + ] * number_of_functions_in_project + ctx = mock_ctx() + mock_conn.return_value = ctx + + with project_directory("snowpark_function_fully_qualified_name") as tmp_dir: + alter_snowflake_yml( + tmp_dir / "snowflake.yml", + parameter_path="snowpark.functions.5.name", + value="custom_schema.fqn_function_error", + ) + result = runner.invoke(["snowpark", "deploy"]) + assert result.output == os_agnostic_snapshot(name="schema error") + + +@mock.patch("snowflake.connector.connect") +@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.describe") +@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.show") +@mock_session_has_warehouse +def test_deploy_function_fully_qualified_name( + mock_om_show, + mock_om_describe, + mock_conn, + runner, + mock_ctx, + project_directory, + alter_snowflake_yml, + os_agnostic_snapshot, +): + number_of_functions_in_project = 6 + mock_om_describe.side_effect = [ 
+ ProgrammingError(errno=DOES_NOT_EXIST_OR_NOT_AUTHORIZED), + ] * number_of_functions_in_project + ctx = mock_ctx() + mock_conn.return_value = ctx + + with project_directory("snowpark_function_fully_qualified_name") as tmp_dir: + alter_snowflake_yml( + tmp_dir / "snowflake.yml", + parameter_path="snowpark.functions.5.name", + value="fqn_function3", + ) + result = runner.invoke(["snowpark", "deploy"]) + assert result.exit_code == 0 + assert result.output == os_agnostic_snapshot(name="ok") + + +@pytest.mark.parametrize( + "parameter_type,default_value", + [ + ("string", None), + ("string", ""), + ("int", None), + ("variant", None), + ("bool", None), + ], +) +@mock.patch("snowflake.connector.connect") +@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager") +@mock_session_has_warehouse +def test_deploy_function_with_empty_default_value( + mock_object_manager, + mock_connector, + mock_ctx, + runner, + project_directory, + alter_snowflake_yml, + parameter_type, + default_value, +): + mock_object_manager.return_value.describe.side_effect = ProgrammingError( + errno=DOES_NOT_EXIST_OR_NOT_AUTHORIZED + ) + ctx = mock_ctx() + mock_connector.return_value = ctx + with project_directory("snowpark_functions") as project_dir: + snowflake_yml = project_dir / "snowflake.yml" + for param, value in [("type", parameter_type), ("default", default_value)]: + alter_snowflake_yml( + snowflake_yml, + parameter_path=f"snowpark.functions.0.signature.0.{param}", + value=value, + ) + alter_snowflake_yml( + snowflake_yml, + parameter_path=f"snowpark.functions.0.runtime", + value="3.10", + ) + result = runner.invoke( + ["snowpark", "deploy", "--format", "json"], catch_exceptions=False + ) + default_value_json = default_value + if default_value is None: + default_value_json = "null" + elif parameter_type == "string": + default_value_json = f"'{default_value}'" + + assert result.exit_code == 0, result.output + assert json.loads(result.output) == [ + { + "object": f"MockDatabase.MockSchema.func1(a {parameter_type} default {default_value_json}, b variant)", + "status": "created", + "type": "function", + } + ] + + +@mock.patch("snowflake.connector.connect") +def test_execute_function(mock_connector, runner, mock_ctx): + ctx = mock_ctx() + mock_connector.return_value = ctx + result = runner.invoke( + [ + "snowpark", + "execute", + "function", + "functionName(42, 'string')", + ] + ) + + assert result.exit_code == 0, result.output + assert ctx.get_query() == "select functionName(42, 'string')" + + +def _deploy_function( + rows, + mock_connector, + runner, + mock_ctx, + mock_cursor, + project_directory, + *args, +): + ctx = mock_ctx(mock_cursor(rows=rows, columns=[])) + mock_connector.return_value = ctx + with ( + mock.patch( + "snowflake.cli._plugins.snowpark.commands.ObjectManager.describe" + ) as om_describe, + mock.patch( + "snowflake.cli._plugins.snowpark.commands.ObjectManager.show" + ) as om_show, + ): + om_describe.return_value = rows + + with project_directory("snowpark_functions") as temp_dir: + (Path(temp_dir) / "requirements.snowflake.txt").write_text( + "foo==1.2.3\nbar>=3.0.0" + ) + result = runner.invoke( + [ + "snowpark", + "deploy", + "--format", + "json", + *args, + ] + ) + queries = ctx.get_queries() + return queries, result, temp_dir diff --git a/tests/snowpark/test_models.py b/tests/snowpark/test_models.py index c1e847398a..b18dd9198c 100644 --- a/tests/snowpark/test_models.py +++ b/tests/snowpark/test_models.py @@ -86,3 +86,17 @@ def test_wheel_metadata_parsing(test_root_path): assert 
meta.name == "zendesk" assert meta.wheel_path == wheel_path.path assert meta.dependencies == ["httplib2", "simplejson"] + + +def test_raise_error_when_artifact_contains_asterix( + runner, project_directory, alter_snowflake_yml, os_agnostic_snapshot +): + with project_directory("glob_patterns") as tmp_dir: + alter_snowflake_yml( + tmp_dir / "snowflake.yml", "entities.hello_procedure.artifacts", ["src/*"] + ) + + result = runner.invoke(["snowpark", "build"]) + + assert result.exit_code == 1 + assert result.output == os_agnostic_snapshot diff --git a/tests/snowpark/test_procedure.py b/tests/snowpark/test_procedure.py index 84a23d0999..fc13e1e6ef 100644 --- a/tests/snowpark/test_procedure.py +++ b/tests/snowpark/test_procedure.py @@ -19,6 +19,9 @@ from unittest.mock import call import pytest +from snowflake.cli._plugins.snowpark.package_utils import ( + DownloadUnavailablePackagesResult, +) from snowflake.cli.api.constants import ObjectType from snowflake.cli.api.errno import DOES_NOT_EXIST_OR_NOT_AUTHORIZED from snowflake.cli.api.identifiers import FQN @@ -55,16 +58,21 @@ def test_deploy_function_no_procedure(runner, project_directory): @mock.patch("snowflake.connector.connect") @mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.describe") @mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.show") +@mock.patch( + "snowflake.cli._plugins.snowpark.package_utils.download_unavailable_packages" +) @mock_session_has_warehouse def test_deploy_procedure( + mock_download, mock_om_show, mock_om_describe, mock_conn, runner, mock_ctx, project_directory, + enable_snowpark_glob_support_feature_flag, ): - + mock_download.return_value = DownloadUnavailablePackagesResult() mock_om_describe.side_effect = ProgrammingError( errno=DOES_NOT_EXIST_OR_NOT_AUTHORIZED ) @@ -72,6 +80,15 @@ def test_deploy_procedure( mock_conn.return_value = ctx with project_directory("snowpark_procedures") as tmp: + result = runner.invoke( + [ + "snowpark", + "build", + "--ignore-anaconda", + ], + catch_exceptions=False, + ) + assert result.exit_code == 0, result.output result = runner.invoke( [ "snowpark", @@ -88,7 +105,7 @@ def test_deploy_procedure( ) assert ctx.get_queries() == [ "create stage if not exists IDENTIFIER('MockDatabase.MockSchema.dev_deployment') comment='deployments managed by Snowflake CLI'", - f"put file://{Path(tmp).resolve()}/app.py @MockDatabase.MockSchema.dev_deployment/my_snowpark_project/ auto_compress=false parallel=4 overwrite=True", + f"put file://{Path(tmp).resolve()}/output/my_snowpark_project/app.py @MockDatabase.MockSchema.dev_deployment/my_snowpark_project/ auto_compress=false parallel=4 overwrite=True", dedent( """\ create or replace procedure IDENTIFIER('MockDatabase.MockSchema.procedureName')(name string) @@ -119,15 +136,21 @@ def test_deploy_procedure( @mock.patch("snowflake.connector.connect") @mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.describe") @mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.show") +@mock.patch( + "snowflake.cli._plugins.snowpark.package_utils.download_unavailable_packages" +) @mock_session_has_warehouse def test_deploy_procedure_with_external_access( + mock_download, mock_om_show, mock_om_describe, mock_conn, runner, mock_ctx, project_directory, + enable_snowpark_glob_support_feature_flag, ): + mock_download.return_value = DownloadUnavailablePackagesResult() mock_om_describe.side_effect = ProgrammingError( errno=DOES_NOT_EXIST_OR_NOT_AUTHORIZED ) @@ -140,6 +163,15 @@ def 
test_deploy_procedure_with_external_access( mock_conn.return_value = ctx with project_directory("snowpark_procedure_external_access") as project_dir: + result = runner.invoke( + [ + "snowpark", + "build", + "--ignore-anaconda", + ], + catch_exceptions=False, + ) + assert result.exit_code == 0, result.output result = runner.invoke( [ "snowpark", @@ -158,7 +190,7 @@ def test_deploy_procedure_with_external_access( ) assert ctx.get_queries() == [ "create stage if not exists IDENTIFIER('MockDatabase.MockSchema.dev_deployment') comment='deployments managed by Snowflake CLI'", - f"put file://{Path(project_dir).resolve()}/app.py @MockDatabase.MockSchema.dev_deployment/my_snowpark_project/" + f"put file://{Path(project_dir).resolve()}/output/my_snowpark_project/app.py @MockDatabase.MockSchema.dev_deployment/my_snowpark_project/" f" auto_compress=false parallel=4 overwrite=True", dedent( """\ @@ -180,8 +212,12 @@ def test_deploy_procedure_with_external_access( @mock.patch("snowflake.connector.connect") @mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.describe") @mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.show") +@mock.patch( + "snowflake.cli._plugins.snowpark.package_utils.download_unavailable_packages" +) @mock_session_has_warehouse def test_deploy_procedure_secrets_without_external_access( + mock_download, mock_om_show, mock_om_describe, mock_conn, @@ -189,7 +225,9 @@ def test_deploy_procedure_secrets_without_external_access( mock_ctx, project_directory, os_agnostic_snapshot, + enable_snowpark_glob_support_feature_flag, ): + mock_download.return_value = DownloadUnavailablePackagesResult() ctx = mock_ctx() mock_conn.return_value = ctx @@ -199,6 +237,15 @@ def test_deploy_procedure_secrets_without_external_access( ] with project_directory("snowpark_procedure_secrets_without_external_access"): + result = runner.invoke( + [ + "snowpark", + "build", + "--ignore-anaconda", + ], + catch_exceptions=False, + ) + assert result.exit_code == 0, result.output result = runner.invoke( [ "snowpark", @@ -214,8 +261,12 @@ def test_deploy_procedure_secrets_without_external_access( @mock.patch("snowflake.connector.connect") @mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.describe") @mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.show") +@mock.patch( + "snowflake.cli._plugins.snowpark.package_utils.download_unavailable_packages" +) @mock_session_has_warehouse def test_deploy_procedure_fails_if_integration_does_not_exists( + mock_download, mock_om_show, mock_om_describe, mock_conn, @@ -223,7 +274,9 @@ def test_deploy_procedure_fails_if_integration_does_not_exists( mock_ctx, project_directory, os_agnostic_snapshot, + enable_snowpark_glob_support_feature_flag, ): + mock_download.return_value = DownloadUnavailablePackagesResult() ctx = mock_ctx() mock_conn.return_value = ctx @@ -232,6 +285,15 @@ def test_deploy_procedure_fails_if_integration_does_not_exists( ] with project_directory("snowpark_procedure_external_access"): + result = runner.invoke( + [ + "snowpark", + "build", + "--ignore-anaconda", + ], + catch_exceptions=False, + ) + assert result.exit_code == 0, result.output result = runner.invoke( [ "snowpark", @@ -250,8 +312,12 @@ def test_deploy_procedure_fails_if_integration_does_not_exists( @mock.patch("snowflake.connector.connect") @mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.describe") @mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.show") +@mock.patch( + 
"snowflake.cli._plugins.snowpark.package_utils.download_unavailable_packages" +) @mock_session_has_warehouse def test_deploy_procedure_fails_if_object_exists_and_no_replace( + mock_download, mock_om_show, mock_om_describe, mock_conn, @@ -261,7 +327,9 @@ def test_deploy_procedure_fails_if_object_exists_and_no_replace( mock_ctx, project_directory, os_agnostic_snapshot, + enable_snowpark_glob_support_feature_flag, ): + mock_download.return_value = DownloadUnavailablePackagesResult() mock_om_describe.return_value = mock_cursor( [ ("packages", "[]"), @@ -274,6 +342,15 @@ def test_deploy_procedure_fails_if_object_exists_and_no_replace( mock_conn.return_value = ctx with project_directory("snowpark_procedures"): + result = runner.invoke( + [ + "snowpark", + "build", + "--ignore-anaconda", + ], + catch_exceptions=False, + ) + assert result.exit_code == 0, result.output result = runner.invoke(["snowpark", "deploy"]) assert result.exit_code == 1 @@ -283,8 +360,12 @@ def test_deploy_procedure_fails_if_object_exists_and_no_replace( @mock.patch("snowflake.connector.connect") @mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.describe") @mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.show") +@mock.patch( + "snowflake.cli._plugins.snowpark.package_utils.download_unavailable_packages" +) @mock_session_has_warehouse def test_deploy_procedure_replace_nothing_to_update( + mock_download, mock_om_show, mock_om_describe, mock_conn, @@ -293,7 +374,9 @@ def test_deploy_procedure_replace_nothing_to_update( mock_ctx, project_directory, caplog, + enable_snowpark_glob_support_feature_flag, ): + mock_download.return_value = DownloadUnavailablePackagesResult() mock_om_describe.side_effect = [ mock_cursor( [ @@ -319,6 +402,15 @@ def test_deploy_procedure_replace_nothing_to_update( mock_conn.return_value = ctx with project_directory("snowpark_procedures"): + result = runner.invoke( + [ + "snowpark", + "build", + "--ignore-anaconda", + ], + catch_exceptions=False, + ) + assert result.exit_code == 0, result.output result = runner.invoke(["snowpark", "deploy", "--replace", "--format", "json"]) assert result.exit_code == 0, result.output @@ -339,8 +431,12 @@ def test_deploy_procedure_replace_nothing_to_update( @mock.patch("snowflake.connector.connect") @mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.describe") @mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.show") +@mock.patch( + "snowflake.cli._plugins.snowpark.package_utils.download_unavailable_packages" +) @mock_session_has_warehouse def test_deploy_procedure_replace_updates_single_object( + mock_download, mock_om_show, mock_om_describe, mock_conn, @@ -348,7 +444,9 @@ def test_deploy_procedure_replace_updates_single_object( mock_cursor, mock_ctx, project_directory, + enable_snowpark_glob_support_feature_flag, ): + mock_download.return_value = DownloadUnavailablePackagesResult() mock_om_describe.side_effect = [ mock_cursor( [ @@ -373,6 +471,15 @@ def test_deploy_procedure_replace_updates_single_object( mock_conn.return_value = ctx with project_directory("snowpark_procedures"): + result = runner.invoke( + [ + "snowpark", + "build", + "--ignore-anaconda", + ], + catch_exceptions=False, + ) + assert result.exit_code == 0, result.output result = runner.invoke(["snowpark", "deploy", "--replace", "--format", "json"]) assert result.exit_code == 0 @@ -393,8 +500,12 @@ def test_deploy_procedure_replace_updates_single_object( @mock.patch("snowflake.connector.connect") 
@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.describe") @mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.show") +@mock.patch( + "snowflake.cli._plugins.snowpark.package_utils.download_unavailable_packages" +) @mock_session_has_warehouse def test_deploy_procedure_replace_creates_missing_object( + mock_download, mock_om_show, mock_om_describe, mock_conn, @@ -402,7 +513,9 @@ def test_deploy_procedure_replace_creates_missing_object( mock_cursor, mock_ctx, project_directory, + enable_snowpark_glob_support_feature_flag, ): + mock_download.return_value = DownloadUnavailablePackagesResult() mock_om_describe.side_effect = [ mock_cursor( [ @@ -419,9 +532,18 @@ def test_deploy_procedure_replace_creates_missing_object( mock_conn.return_value = ctx with project_directory("snowpark_procedures"): + result = runner.invoke( + [ + "snowpark", + "build", + "--ignore-anaconda", + ], + catch_exceptions=False, + ) + assert result.exit_code == 0, result.output result = runner.invoke(["snowpark", "deploy", "--replace", "--format", "json"]) - assert result.exit_code == 0 + assert result.exit_code == 0, result.output assert json.loads(result.output) == [ { "object": "MockDatabase.MockSchema.procedureName(name string)", @@ -439,8 +561,12 @@ def test_deploy_procedure_replace_creates_missing_object( @mock.patch("snowflake.connector.connect") @mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.describe") @mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.show") +@mock.patch( + "snowflake.cli._plugins.snowpark.package_utils.download_unavailable_packages" +) @mock_session_has_warehouse def test_deploy_procedure_fully_qualified_name( + mock_download, mock_om_show, mock_om_describe, mock_conn, @@ -449,7 +575,9 @@ def test_deploy_procedure_fully_qualified_name( project_directory, alter_snowflake_yml, os_agnostic_snapshot, + enable_snowpark_glob_support_feature_flag, ): + mock_download.return_value = DownloadUnavailablePackagesResult() number_of_procedures_in_projects = 6 mock_om_describe.side_effect = [ ProgrammingError(errno=DOES_NOT_EXIST_OR_NOT_AUTHORIZED), @@ -457,7 +585,16 @@ def test_deploy_procedure_fully_qualified_name( ctx = mock_ctx() mock_conn.return_value = ctx - with project_directory("snowpark_procedure_fully_qualified_name") as tmp_dir: + with project_directory("snowpark_procedure_fully_qualified_name"): + result = runner.invoke( + [ + "snowpark", + "build", + "--ignore-anaconda", + ], + catch_exceptions=False, + ) + assert result.exit_code == 0, result.output result = runner.invoke(["snowpark", "deploy"]) assert result.output == os_agnostic_snapshot(name="database error") @@ -465,8 +602,12 @@ def test_deploy_procedure_fully_qualified_name( @mock.patch("snowflake.connector.connect") @mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.describe") @mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.show") +@mock.patch( + "snowflake.cli._plugins.snowpark.package_utils.download_unavailable_packages" +) @mock_session_has_warehouse def test_deploy_procedure_fully_qualified_name_duplicated_schema( + mock_download, mock_om_show, mock_om_describe, mock_conn, @@ -475,7 +616,9 @@ def test_deploy_procedure_fully_qualified_name_duplicated_schema( project_directory, alter_snowflake_yml, os_agnostic_snapshot, + enable_snowpark_glob_support_feature_flag, ): + mock_download.return_value = DownloadUnavailablePackagesResult() number_of_procedures_in_projects = 6 mock_om_describe.side_effect = [ 
ProgrammingError(errno=DOES_NOT_EXIST_OR_NOT_AUTHORIZED), @@ -489,6 +632,15 @@ def test_deploy_procedure_fully_qualified_name_duplicated_schema( parameter_path="snowpark.procedures.5.name", value="custom_schema.fqn_procedure_error", ) + result = runner.invoke( + [ + "snowpark", + "build", + "--ignore-anaconda", + ], + catch_exceptions=False, + ) + assert result.exit_code == 0, result.output result = runner.invoke(["snowpark", "deploy"]) assert result.output == os_agnostic_snapshot(name="schema error") @@ -506,8 +658,12 @@ def test_deploy_procedure_fully_qualified_name_duplicated_schema( @mock.patch("snowflake.connector.connect") @mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.describe") @mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.show") +@mock.patch( + "snowflake.cli._plugins.snowpark.package_utils.download_unavailable_packages" +) @mock_session_has_warehouse def test_deploy_procedure_with_empty_default_value( + mock_download, mock_om_show, mock_om_describe, mock_conn, @@ -517,7 +673,9 @@ def test_deploy_procedure_with_empty_default_value( alter_snowflake_yml, parameter_type, default_value, + enable_snowpark_glob_support_feature_flag, ): + mock_download.return_value = DownloadUnavailablePackagesResult() mock_om_describe.side_effect = ProgrammingError( errno=DOES_NOT_EXIST_OR_NOT_AUTHORIZED ) @@ -532,6 +690,16 @@ def test_deploy_procedure_with_empty_default_value( parameter_path=f"snowpark.procedures.0.signature.0.{param}", value=value, ) + + result = runner.invoke( + [ + "snowpark", + "build", + "--ignore-anaconda", + ], + catch_exceptions=False, + ) + assert result.exit_code == 0, result.output result = runner.invoke(["snowpark", "deploy", "--format", "json"]) default_value_json = default_value diff --git a/tests/snowpark/test_procedure_old_build.py b/tests/snowpark/test_procedure_old_build.py new file mode 100644 index 0000000000..2f1c817576 --- /dev/null +++ b/tests/snowpark/test_procedure_old_build.py @@ -0,0 +1,555 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import json +from pathlib import Path +from textwrap import dedent +from unittest import mock +from unittest.mock import call + +import pytest +from snowflake.cli.api.constants import ObjectType +from snowflake.cli.api.errno import DOES_NOT_EXIST_OR_NOT_AUTHORIZED +from snowflake.cli.api.identifiers import FQN +from snowflake.connector import ProgrammingError + +from tests_common import IS_WINDOWS + +if IS_WINDOWS: + pytest.skip("Requires further refactor to work on Windows", allow_module_level=True) + + +mock_session_has_warehouse = mock.patch( + "snowflake.cli.api.sql_execution.SqlExecutionMixin.session_has_warehouse", + lambda _: True, +) + + +@mock_session_has_warehouse +def test_deploy_function_no_procedure(runner, project_directory): + with project_directory("empty_project"): + result = runner.invoke( + [ + "snowpark", + "deploy", + ], + ) + assert result.exit_code == 1 + assert ( + "No procedures or functions were specified in the project definition." + in result.output + ) + + +@mock.patch("snowflake.connector.connect") +@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.describe") +@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.show") +@mock_session_has_warehouse +def test_deploy_procedure( + mock_om_show, + mock_om_describe, + mock_conn, + runner, + mock_ctx, + project_directory, +): + + mock_om_describe.side_effect = ProgrammingError( + errno=DOES_NOT_EXIST_OR_NOT_AUTHORIZED + ) + ctx = mock_ctx() + mock_conn.return_value = ctx + + with project_directory("snowpark_procedures") as tmp: + result = runner.invoke( + [ + "snowpark", + "deploy", + ] + ) + + assert result.exit_code == 0, result.output + mock_om_describe.return_value( + [ + call(object_type=str(ObjectType.PROCEDURE), name="procedureName(string)"), + call(object_type=str(ObjectType.PROCEDURE), name="test()"), + ] + ) + assert ctx.get_queries() == [ + "create stage if not exists IDENTIFIER('MockDatabase.MockSchema.dev_deployment') comment='deployments managed by Snowflake CLI'", + f"put file://{Path(tmp).resolve()}/app.py @MockDatabase.MockSchema.dev_deployment/my_snowpark_project/ auto_compress=false parallel=4 overwrite=True", + dedent( + """\ + create or replace procedure IDENTIFIER('MockDatabase.MockSchema.procedureName')(name string) + copy grants + returns string + language python + runtime_version=3.10 + imports=('@MockDatabase.MockSchema.dev_deployment/my_snowpark_project/app.py') + handler='hello' + packages=() + """ + ).strip(), + dedent( + """\ + create or replace procedure IDENTIFIER('MockDatabase.MockSchema.test')() + copy grants + returns string + language python + runtime_version=3.10 + imports=('@MockDatabase.MockSchema.dev_deployment/my_snowpark_project/app.py') + handler='test' + packages=() + """ + ).strip(), + ] + + +@mock.patch("snowflake.connector.connect") +@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.describe") +@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.show") +@mock_session_has_warehouse +def test_deploy_procedure_with_external_access( + mock_om_show, + mock_om_describe, + mock_conn, + runner, + mock_ctx, + project_directory, +): + mock_om_describe.side_effect = ProgrammingError( + errno=DOES_NOT_EXIST_OR_NOT_AUTHORIZED + ) + mock_om_show.return_value = [ + {"name": "external_1", "type": "EXTERNAL_ACCESS"}, + {"name": "external_2", "type": "EXTERNAL_ACCESS"}, + ] + + ctx = mock_ctx() + mock_conn.return_value = ctx + + with project_directory("snowpark_procedure_external_access") as project_dir: + result = 
runner.invoke( + [ + "snowpark", + "deploy", + ] + ) + + assert result.exit_code == 0, result.output + mock_om_describe.assert_has_calls( + [ + call( + object_type=str(ObjectType.PROCEDURE), + fqn=FQN.from_string("MockDatabase.MockSchema.procedureName(string)"), + ), + ] + ) + assert ctx.get_queries() == [ + "create stage if not exists IDENTIFIER('MockDatabase.MockSchema.dev_deployment') comment='deployments managed by Snowflake CLI'", + f"put file://{Path(project_dir).resolve()}/app.py @MockDatabase.MockSchema.dev_deployment/my_snowpark_project/" + f" auto_compress=false parallel=4 overwrite=True", + dedent( + """\ + create or replace procedure IDENTIFIER('MockDatabase.MockSchema.procedureName')(name string) + copy grants + returns string + language python + runtime_version=3.10 + imports=('@MockDatabase.MockSchema.dev_deployment/my_snowpark_project/app.py') + handler='app.hello' + packages=() + external_access_integrations=(external_1, external_2) + secrets=('cred'=cred_name, 'other'=other_name) + """ + ).strip(), + ] + + +@mock.patch("snowflake.connector.connect") +@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.describe") +@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.show") +@mock_session_has_warehouse +def test_deploy_procedure_secrets_without_external_access( + mock_om_show, + mock_om_describe, + mock_conn, + runner, + mock_ctx, + project_directory, + os_agnostic_snapshot, +): + ctx = mock_ctx() + mock_conn.return_value = ctx + + mock_om_show.return_value = [ + {"name": "external_1", "type": "EXTERNAL_ACCESS"}, + {"name": "external_2", "type": "EXTERNAL_ACCESS"}, + ] + + with project_directory("snowpark_procedure_secrets_without_external_access"): + result = runner.invoke( + [ + "snowpark", + "deploy", + ], + catch_exceptions=False, + ) + + assert result.exit_code == 1, result.output + assert result.output == os_agnostic_snapshot + + +@mock.patch("snowflake.connector.connect") +@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.describe") +@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.show") +@mock_session_has_warehouse +def test_deploy_procedure_fails_if_integration_does_not_exists( + mock_om_show, + mock_om_describe, + mock_conn, + runner, + mock_ctx, + project_directory, + os_agnostic_snapshot, +): + ctx = mock_ctx() + mock_conn.return_value = ctx + + mock_om_show.return_value = [ + {"name": "external_1", "type": "EXTERNAL_ACCESS"}, + ] + + with project_directory("snowpark_procedure_external_access"): + result = runner.invoke( + [ + "snowpark", + "deploy", + ], + catch_exceptions=False, + ) + + assert result.exit_code == 1, result.output + assert result.output == os_agnostic_snapshot + + +@mock.patch( + "snowflake.cli._plugins.snowpark.commands._check_if_all_defined_integrations_exists" +) +@mock.patch("snowflake.connector.connect") +@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.describe") +@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.show") +@mock_session_has_warehouse +def test_deploy_procedure_fails_if_object_exists_and_no_replace( + mock_om_show, + mock_om_describe, + mock_conn, + _, + runner, + mock_cursor, + mock_ctx, + project_directory, + os_agnostic_snapshot, +): + mock_om_describe.return_value = mock_cursor( + [ + ("packages", "[]"), + ("handler", "hello"), + ("returns", "string"), + ], + columns=["key", "value"], + ) + ctx = mock_ctx() + mock_conn.return_value = ctx + + with project_directory("snowpark_procedures"): + result = 
runner.invoke(["snowpark", "deploy"]) + + assert result.exit_code == 1 + assert result.output == os_agnostic_snapshot + + +@mock.patch("snowflake.connector.connect") +@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.describe") +@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.show") +@mock_session_has_warehouse +def test_deploy_procedure_replace_nothing_to_update( + mock_om_show, + mock_om_describe, + mock_conn, + runner, + mock_cursor, + mock_ctx, + project_directory, + caplog, +): + mock_om_describe.side_effect = [ + mock_cursor( + [ + ("packages", "[]"), + ("handler", "hello"), + ("returns", "string"), + ("imports", "dev_deployment/my_snowpark_project/app.py"), + ], + columns=["key", "value"], + ), + mock_cursor( + [ + ("packages", "[]"), + ("handler", "test"), + ("returns", "string"), + ("imports", "dev_deployment/my_snowpark_project/app.py"), + ("runtime_version", "3.10"), + ], + columns=["key", "value"], + ), + ] + ctx = mock_ctx() + mock_conn.return_value = ctx + + with project_directory("snowpark_procedures"): + result = runner.invoke(["snowpark", "deploy", "--replace", "--format", "json"]) + + assert result.exit_code == 0, result.output + assert json.loads(result.output) == [ + { + "object": "MockDatabase.MockSchema.procedureName(name string)", + "status": "packages updated", + "type": "procedure", + }, + { + "object": "MockDatabase.MockSchema.test()", + "status": "packages updated", + "type": "procedure", + }, + ] + + +@mock.patch("snowflake.connector.connect") +@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.describe") +@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.show") +@mock_session_has_warehouse +def test_deploy_procedure_replace_updates_single_object( + mock_om_show, + mock_om_describe, + mock_conn, + runner, + mock_cursor, + mock_ctx, + project_directory, +): + mock_om_describe.side_effect = [ + mock_cursor( + [ + ("packages", "[]"), + ("handler", "hello"), + ("returns", "string"), + ("imports", "dev_deployment/my_snowpark_project/app.py"), + ], + columns=["key", "value"], + ), + mock_cursor( + [ + ("packages", "[]"), + ("handler", "foo"), + ("returns", "string"), + ("imports", "dev_deployment/my_snowpark_project/app.zip"), + ], + columns=["key", "value"], + ), + ] + ctx = mock_ctx() + mock_conn.return_value = ctx + + with project_directory("snowpark_procedures"): + result = runner.invoke(["snowpark", "deploy", "--replace", "--format", "json"]) + + assert result.exit_code == 0 + assert json.loads(result.output) == [ + { + "object": "MockDatabase.MockSchema.procedureName(name string)", + "status": "packages updated", + "type": "procedure", + }, + { + "object": "MockDatabase.MockSchema.test()", + "status": "definition updated", + "type": "procedure", + }, + ] + + +@mock.patch("snowflake.connector.connect") +@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.describe") +@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.show") +@mock_session_has_warehouse +def test_deploy_procedure_replace_creates_missing_object( + mock_om_show, + mock_om_describe, + mock_conn, + runner, + mock_cursor, + mock_ctx, + project_directory, +): + mock_om_describe.side_effect = [ + mock_cursor( + [ + ("packages", "[]"), + ("handler", "hello"), + ("returns", "string"), + ("imports", "dev_deployment/my_snowpark_project/app.py"), + ], + columns=["key", "value"], + ), + ProgrammingError(errno=DOES_NOT_EXIST_OR_NOT_AUTHORIZED), + ] + ctx = mock_ctx() + mock_conn.return_value = ctx + + with 
project_directory("snowpark_procedures"): + result = runner.invoke(["snowpark", "deploy", "--replace", "--format", "json"]) + + assert result.exit_code == 0 + assert json.loads(result.output) == [ + { + "object": "MockDatabase.MockSchema.procedureName(name string)", + "status": "packages updated", + "type": "procedure", + }, + { + "object": "MockDatabase.MockSchema.test()", + "status": "created", + "type": "procedure", + }, + ] + + +@mock.patch("snowflake.connector.connect") +@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.describe") +@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.show") +@mock_session_has_warehouse +def test_deploy_procedure_fully_qualified_name( + mock_om_show, + mock_om_describe, + mock_conn, + runner, + mock_ctx, + project_directory, + alter_snowflake_yml, + os_agnostic_snapshot, +): + number_of_procedures_in_projects = 6 + mock_om_describe.side_effect = [ + ProgrammingError(errno=DOES_NOT_EXIST_OR_NOT_AUTHORIZED), + ] * number_of_procedures_in_projects + ctx = mock_ctx() + mock_conn.return_value = ctx + + with project_directory("snowpark_procedure_fully_qualified_name") as tmp_dir: + result = runner.invoke(["snowpark", "deploy"]) + assert result.output == os_agnostic_snapshot(name="database error") + + +@mock.patch("snowflake.connector.connect") +@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.describe") +@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.show") +@mock_session_has_warehouse +def test_deploy_procedure_fully_qualified_name_duplicated_schema( + mock_om_show, + mock_om_describe, + mock_conn, + runner, + mock_ctx, + project_directory, + alter_snowflake_yml, + os_agnostic_snapshot, +): + number_of_procedures_in_projects = 6 + mock_om_describe.side_effect = [ + ProgrammingError(errno=DOES_NOT_EXIST_OR_NOT_AUTHORIZED), + ] * number_of_procedures_in_projects + ctx = mock_ctx() + mock_conn.return_value = ctx + + with project_directory("snowpark_procedure_fully_qualified_name") as tmp_dir: + alter_snowflake_yml( + tmp_dir / "snowflake.yml", + parameter_path="snowpark.procedures.5.name", + value="custom_schema.fqn_procedure_error", + ) + result = runner.invoke(["snowpark", "deploy"]) + assert result.output == os_agnostic_snapshot(name="schema error") + + +@pytest.mark.parametrize( + "parameter_type,default_value", + [ + ("string", None), + ("string", ""), + ("int", None), + ("variant", None), + ("bool", None), + ], +) +@mock.patch("snowflake.connector.connect") +@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.describe") +@mock.patch("snowflake.cli._plugins.snowpark.commands.ObjectManager.show") +@mock_session_has_warehouse +def test_deploy_procedure_with_empty_default_value( + mock_om_show, + mock_om_describe, + mock_conn, + runner, + mock_ctx, + project_directory, + alter_snowflake_yml, + parameter_type, + default_value, +): + mock_om_describe.side_effect = ProgrammingError( + errno=DOES_NOT_EXIST_OR_NOT_AUTHORIZED + ) + ctx = mock_ctx() + mock_conn.return_value = ctx + + with project_directory("snowpark_procedures") as project_dir: + snowflake_yml = project_dir / "snowflake.yml" + for param, value in [("type", parameter_type), ("default", default_value)]: + alter_snowflake_yml( + snowflake_yml, + parameter_path=f"snowpark.procedures.0.signature.0.{param}", + value=value, + ) + result = runner.invoke(["snowpark", "deploy", "--format", "json"]) + + default_value_json = default_value + if default_value is None: + default_value_json = "null" + elif parameter_type == 
"string": + default_value_json = f"'{default_value}'" + + assert result.exit_code == 0, result.output + assert json.loads(result.output) == [ + { + "object": f"MockDatabase.MockSchema.procedureName(name {parameter_type} default {default_value_json})", + "status": "created", + "type": "procedure", + }, + { + "object": "MockDatabase.MockSchema.test()", + "status": "created", + "type": "procedure", + }, + ] diff --git a/tests/snowpark/test_project_paths.py b/tests/snowpark/test_project_paths.py new file mode 100644 index 0000000000..55796e1a98 --- /dev/null +++ b/tests/snowpark/test_project_paths.py @@ -0,0 +1,229 @@ +from pathlib import Path +from unittest import mock + +import pytest +from snowflake.cli._plugins.snowpark.snowpark_project_paths import Artefact + + +@pytest.mark.parametrize( + "path, dest, is_file, expected_path", + [ + ("src", None, False, "@db.public.stage/src.zip"), + ("src/", None, False, "@db.public.stage/src.zip"), + ("src", "source", False, "@db.public.stage/source/src.zip"), + ("src/app.py", None, True, "@db.public.stage/src/app.py"), + ("src/app.py", "source/new_app.py", True, "@db.public.stage/source/new_app.py"), + ("src/dir/dir2/app.py", None, True, "@db.public.stage/src/dir/dir2/app.py"), + ("src/dir/dir2/app.py", "source/", True, "@db.public.stage/source/app.py"), + ("src/*", "source/", False, "@db.public.stage/source/src.zip"), + ("src/**/*.py", None, False, "@db.public.stage/src.zip"), + ("src/**/*.py", "source/", False, "@db.public.stage/source/src.zip"), + ("src/app*", None, False, "@db.public.stage/src.zip"), + ("src/app[1-5].py", None, False, "@db.public.stage/src.zip"), + ], +) +@mock.patch("snowflake.cli.api.cli_global_context.get_cli_context") +def test_artifact_import_path(mock_ctx_context, path, dest, is_file, expected_path): + mock_connection = mock.Mock() + mock_connection.database = "db" + mock_connection.schema = "public" + mock_ctx_context.return_value.connection = mock_connection + stage = "stage" + + with mock.patch.object(Path, "is_file" if is_file else "is_dir", return_value=True): + import_path = Artefact(Path(), Path(path), dest).import_path(stage) + + assert import_path == expected_path + + +@pytest.mark.parametrize( + "path, dest, is_file, expected_path", + [ + ("src", None, False, "@db.public.stage/"), + ("src/", None, False, "@db.public.stage/"), + ("src", "source", False, "@db.public.stage/source/"), + ("src/app.py", None, True, "@db.public.stage/src/"), + ("src/app.py", "source/new_app.py", True, "@db.public.stage/source/"), + ("src/dir/dir2/app.py", None, True, "@db.public.stage/src/dir/dir2/"), + ("src/dir/dir2/app.py", "source/", True, "@db.public.stage/source/"), + ("src/*", "source/", False, "@db.public.stage/source/"), + ("src/**/*.py", None, False, "@db.public.stage/"), + ("src/**/*.py", "source/", False, "@db.public.stage/source/"), + ("src/app*", None, False, "@db.public.stage/"), + ("src/app[1-5].py", None, False, "@db.public.stage/"), + ], +) +@mock.patch("snowflake.cli.api.cli_global_context.get_cli_context") +def test_artifact_upload_path(mock_ctx_context, path, dest, is_file, expected_path): + mock_connection = mock.Mock() + mock_connection.database = "db" + mock_connection.schema = "public" + mock_ctx_context.return_value.connection = mock_connection + + with mock.patch.object(Path, "is_file" if is_file else "is_dir", return_value=True): + upload_path = Artefact(Path(), Path(path), dest).upload_path("stage") + + assert upload_path == expected_path + + +@pytest.mark.parametrize( + "path, dest, is_file, expected_path", + 
[ + ("src", None, False, Path("output") / "src.zip"), + ("src/", None, False, Path("output") / "src.zip"), + ("src", "source", False, Path("output") / "source" / "src.zip"), + ("src/app.py", None, True, Path("output") / "src" / "app.py"), + ( + "src/app.py", + "source/new_app.py", + True, + Path("output") / "source" / "new_app.py", + ), + ("src/*", "source/new_app.py", True, Path("output") / "source" / "new_app.py"), + ( + "src/dir/dir2/app.py", + None, + True, + Path("output") / "src" / "dir" / "dir2" / "app.py", + ), + ( + "src/dir/dir2/app.py", + "source/", + True, + Path("output") / "source" / "app.py", + ), + ("src/*", "source/", False, Path("output") / "source" / "src.zip"), + ("src/**/*.py", None, False, Path("output") / "src.zip"), + ("src/**/*.py", "source/", False, Path("output") / "source" / "src.zip"), + ("src/app*", None, False, Path("output") / "src.zip"), + ("src/app[1-5].py", None, False, Path("output") / "src.zip"), + ], +) +def test_artifact_post_build_path(path, dest, is_file, expected_path): + with mock.patch.object(Path, "is_file" if is_file else "is_dir", return_value=True): + post_build_path = Artefact(Path(), Path(path), dest).post_build_path + + assert post_build_path == expected_path + + +@pytest.mark.parametrize( + "path, dest, is_file, expected_path", + [ + ("src", None, False, "@db.public.stage/src.zip"), + ("src/", None, False, "@db.public.stage/src.zip"), + ("src", "source", False, "@db.public.stage/source/src.zip"), + ("src/app.py", None, True, "@db.public.stage/src/app.py"), + ("src/app.py", "source/new_app.py", True, "@db.public.stage/source/new_app.py"), + ("src/dir/dir2/app.py", None, True, "@db.public.stage/src/dir/dir2/app.py"), + ("src/dir/dir2/app.py", "source/", True, "@db.public.stage/source/app.py"), + ("src/*", "source/", False, "@db.public.stage/source/src.zip"), + ("src/**/*.py", None, False, "@db.public.stage/src.zip"), + ("src/**/*.py", "source/", False, "@db.public.stage/source/src.zip"), + ("src/app*", None, False, "@db.public.stage/src.zip"), + ("src/app[1-5].py", None, False, "@db.public.stage/src.zip"), + ], +) +@mock.patch("snowflake.cli.api.cli_global_context.get_cli_context") +def test_artifact_import_path_from_other_directory( + mock_ctx_context, path, dest, is_file, expected_path +): + mock_connection = mock.Mock() + mock_connection.database = "db" + mock_connection.schema = "public" + mock_ctx_context.return_value.connection = mock_connection + stage = "stage" + + with mock.patch.object(Path, "is_file" if is_file else "is_dir", return_value=True): + import_path = Artefact(Path("/tmp"), Path(path), dest).import_path(stage) + + assert import_path == expected_path + + +@pytest.mark.parametrize( + "path, dest, is_file, expected_path", + [ + ("src", None, False, "@db.public.stage/"), + ("src/", None, False, "@db.public.stage/"), + ("src", "source", False, "@db.public.stage/source/"), + ("src/app.py", None, True, "@db.public.stage/src/"), + ("src/app.py", "source/new_app.py", True, "@db.public.stage/source/"), + ("src/dir/dir2/app.py", None, True, "@db.public.stage/src/dir/dir2/"), + ("src/dir/dir2/app.py", "source/", True, "@db.public.stage/source/"), + ("src/*", "source/", False, "@db.public.stage/source/"), + ("src/**/*.py", None, False, "@db.public.stage/"), + ("src/**/*.py", "source/", False, "@db.public.stage/source/"), + ("src/app*", None, False, "@db.public.stage/"), + ("src/app[1-5].py", None, False, "@db.public.stage/"), + ], +) +@mock.patch("snowflake.cli.api.cli_global_context.get_cli_context") +def 
test_artifact_upload_path_from_other_directory( + mock_ctx_context, path, dest, is_file, expected_path +): + mock_connection = mock.Mock() + mock_connection.database = "db" + mock_connection.schema = "public" + mock_ctx_context.return_value.connection = mock_connection + + with mock.patch.object(Path, "is_file" if is_file else "is_dir", return_value=True): + upload_path = Artefact(Path("/tmp"), Path(path), dest).upload_path("stage") + + assert upload_path == expected_path + + +@pytest.mark.parametrize( + "path, dest, is_file, expected_path", + [ + ("src", None, False, Path.cwd().absolute() / "output" / "src.zip"), + ("src/", None, False, Path.cwd().absolute() / "output" / "src.zip"), + ( + "src", + "source", + False, + Path.cwd().absolute() / "output" / "source" / "src.zip", + ), + ("src/app.py", None, True, Path.cwd().absolute() / "output" / "src" / "app.py"), + ( + "src/app.py", + "source/new_app.py", + True, + Path.cwd().absolute() / "output" / "source" / "new_app.py", + ), + ( + "src/dir/dir2/app.py", + None, + True, + Path.cwd().absolute() / "output" / "src" / "dir" / "dir2" / "app.py", + ), + ( + "src/dir/dir2/app.py", + "source/", + True, + Path.cwd().absolute() / "output" / "source" / "app.py", + ), + ( + "src/*", + "source/", + False, + Path.cwd().absolute() / "output" / "source" / "src.zip", + ), + ("src/**/*.py", None, False, Path.cwd().absolute() / "output" / "src.zip"), + ( + "src/**/*.py", + "source/", + False, + Path.cwd().absolute() / "output" / "source" / "src.zip", + ), + ("src/app*", None, False, Path.cwd().absolute() / "output" / "src.zip"), + ("src/app[1-5].py", None, False, Path.cwd().absolute() / "output" / "src.zip"), + ], +) +def test_artifact_post_build_path_from_other_directory( + path, dest, is_file, expected_path +): + with mock.patch.object(Path, "is_file" if is_file else "is_dir", return_value=True): + post_build_path = Artefact( + Path.cwd().absolute(), Path(path), dest + ).post_build_path + + assert post_build_path == expected_path diff --git a/tests/streamlit/test_artifacts.py b/tests/streamlit/test_artifacts.py new file mode 100644 index 0000000000..4b75406d1c --- /dev/null +++ b/tests/streamlit/test_artifacts.py @@ -0,0 +1,316 @@ +import os +from pathlib import Path +from unittest import mock + +import pytest +from snowflake.cli._plugins.connection.util import UIParameter +from snowflake.connector.compat import IS_WINDOWS + + +@pytest.mark.parametrize( + "artifacts, paths", + [ + ( + "src", + [ + {"local": Path("output") / "src" / "app.py", "stage": "/src"}, + { + "local": Path("output") / "src" / "dir" / "dir_app.py", + "stage": "/src/dir", + }, + ], + ), + ( + "src/", + [ + {"local": Path("output") / "src" / "app.py", "stage": "/src"}, + { + "local": Path("output") / "src" / "dir" / "dir_app.py", + "stage": "/src/dir", + }, + ], + ), + ( + "src/*", + [ + {"local": Path("output") / "src" / "app.py", "stage": "/src"}, + { + "local": Path("output") / "src" / "dir" / "dir_app.py", + "stage": "/src/dir", + }, + ], + ), + ("src/*.py", [{"local": Path("output") / "src" / "app.py", "stage": "/src"}]), + ( + "src/dir/dir_app.py", + [ + { + "local": Path("output") / "src" / "dir" / "dir_app.py", + "stage": "/src/dir", + } + ], + ), + ( + {"src": "src/**/*", "dest": "source/"}, + [ + {"local": Path("output") / "source" / "app.py", "stage": "/source"}, + {"local": Path("output") / "source" / "dir_app.py", "stage": "/source"}, + { + "local": Path("output") / "source" / "dir" / "dir_app.py", + "stage": "/source/dir", + }, + ], + ), + ( + {"src": "src", "dest": 
"source/"}, + [ + { + "local": Path("output") / "source" / "src" / "app.py", + "stage": "/source/src", + }, + { + "local": Path("output") / "source" / "src" / "dir" / "dir_app.py", + "stage": "/source/src/dir", + }, + ], + ), + ( + {"src": "src/", "dest": "source/"}, + [ + { + "local": Path("output") / "source" / "src" / "app.py", + "stage": "/source/src", + }, + { + "local": Path("output") / "source" / "src" / "dir" / "dir_app.py", + "stage": "/source/src/dir", + }, + ], + ), + ( + {"src": "src/*", "dest": "source/"}, + [ + {"local": Path("output") / "source" / "app.py", "stage": "/source"}, + { + "local": Path("output") / "source" / "dir" / "dir_app.py", + "stage": "/source/dir", + }, + ], + ), + ( + {"src": "src/dir/dir_app.py", "dest": "source/dir/apps/"}, + [ + { + "local": Path("output") / "source" / "dir" / "apps" / "dir_app.py", + "stage": "/source/dir/apps", + } + ], + ), + ], +) +@mock.patch("snowflake.connector.connect") +@mock.patch("snowflake.cli._plugins.snowpark.commands.StageManager.put") +@mock.patch( + "snowflake.cli._plugins.connection.util.get_ui_parameters", + return_value={UIParameter.NA_ENABLE_REGIONLESS_REDIRECT: "false"}, +) +def test_deploy_with_artifacts( + mock_param, + mock_sm_put, + mock_conn, + mock_cursor, + runner, + mock_ctx, + project_directory, + alter_snowflake_yml, + artifacts, + paths, +): + ctx = mock_ctx( + mock_cursor( + rows=[ + {"SYSTEM$GET_SNOWSIGHT_HOST()": "https://snowsight.domain"}, + {"CURRENT_ACCOUNT_NAME()": "my_account"}, + ], + columns=["SYSTEM$GET_SNOWSIGHT_HOST()"], + ) + ) + mock_conn.return_value = ctx + + streamlit_files = [ + "streamlit_app.py", + "pages/my_page.py", + "environment.yml", + ] + + with project_directory("glob_patterns") as tmp: + alter_snowflake_yml( + tmp / "snowflake.yml", + "entities.my_streamlit.artifacts", + streamlit_files + [artifacts], + ) + + result = runner.invoke( + [ + "streamlit", + "deploy", + "--replace", + ] + ) + assert result.exit_code == 0, result.output + + put_calls = _extract_put_calls(mock_sm_put) + # Windows needs absolute paths. 
+ if IS_WINDOWS: + tmp_path = tmp.absolute() + else: + tmp_path = tmp.resolve() + for path in paths: + assert { + "local_path": tmp_path / path["local"], + "stage_path": "@MockDatabase.MockSchema.streamlit/test_streamlit_deploy_snowcli" + + path["stage"], + } in put_calls + + +@pytest.mark.parametrize( + "artifacts, paths", + [ + ( + "src", + [ + {"local": "output/src/app.py", "stage": "/src"}, + {"local": "output/src/dir/dir_app.py", "stage": "/src/dir"}, + ], + ), + ( + "src/", + [ + {"local": "output/src/app.py", "stage": "/src"}, + {"local": "output/src/dir/dir_app.py", "stage": "/src/dir"}, + ], + ), + ( + "src/*", + [ + {"local": "output/src/app.py", "stage": "/src"}, + {"local": "output/src/dir/dir_app.py", "stage": "/src/dir"}, + ], + ), + ("src/*.py", [{"local": "output/src/app.py", "stage": "/src"}]), + ( + "src/dir/dir_app.py", + [{"local": "output/src/dir/dir_app.py", "stage": "/src/dir"}], + ), + ( + {"src": "src/**/*", "dest": "source/"}, + [ + {"local": "output/source/app.py", "stage": "/source"}, + {"local": "output/source/dir_app.py", "stage": "/source"}, + {"local": "output/source/dir/dir_app.py", "stage": "/source/dir"}, + ], + ), + ( + {"src": "src", "dest": "source/"}, + [ + {"local": "output/source/src/app.py", "stage": "/source/src"}, + { + "local": "output/source/src/dir/dir_app.py", + "stage": "/source/src/dir", + }, + ], + ), + ( + {"src": "src/", "dest": "source/"}, + [ + {"local": "output/source/src/app.py", "stage": "/source/src"}, + { + "local": "output/source/src/dir/dir_app.py", + "stage": "/source/src/dir", + }, + ], + ), + ( + {"src": "src/*", "dest": "source/"}, + [ + {"local": "output/source/app.py", "stage": "/source"}, + {"local": "output/source/dir/dir_app.py", "stage": "/source/dir"}, + ], + ), + ( + {"src": "src/dir/dir_app.py", "dest": "source/dir/apps/"}, + [ + { + "local": "output/source/dir/apps/dir_app.py", + "stage": "/source/dir/apps", + } + ], + ), + ], +) +@mock.patch("snowflake.connector.connect") +@mock.patch("snowflake.cli._plugins.snowpark.commands.StageManager.put") +@mock.patch( + "snowflake.cli._plugins.connection.util.get_ui_parameters", + return_value={UIParameter.NA_ENABLE_REGIONLESS_REDIRECT: "false"}, +) +def test_deploy_with_artifacts_from_other_directory( + mock_param, + mock_sm_put, + mock_conn, + mock_cursor, + runner, + mock_ctx, + project_directory, + alter_snowflake_yml, + artifacts, + paths, +): + ctx = mock_ctx( + mock_cursor( + rows=[ + {"SYSTEM$GET_SNOWSIGHT_HOST()": "https://snowsight.domain"}, + {"REGIONLESS": "false"}, + {"CURRENT_ACCOUNT_NAME()": "my_account"}, + ], + columns=["SYSTEM$GET_SNOWSIGHT_HOST()"], + ) + ) + mock_conn.return_value = ctx + + streamlit_files = [ + "streamlit_app.py", + "pages/my_page.py", + "environment.yml", + ] + + with project_directory("glob_patterns") as tmp: + os.chdir(Path(os.getcwd()).parent) + alter_snowflake_yml( + tmp / "snowflake.yml", + "entities.my_streamlit.artifacts", + streamlit_files + [artifacts], + ) + + result = runner.invoke(["streamlit", "deploy", "-p", tmp, "--replace"]) + assert result.exit_code == 0, result.output + + put_calls = _extract_put_calls(mock_sm_put) + for path in paths: + assert { + "local_path": tmp / path["local"], + "stage_path": "@MockDatabase.MockSchema.streamlit/test_streamlit_deploy_snowcli" + + path["stage"], + } in put_calls + + +def _extract_put_calls(mock_sm_put): + # Extract the put calls from the mock for better visibility in test logs + return [ + { + "local_path": call.kwargs.get("local_path"), + "stage_path": 
call.kwargs.get("stage_path"), + } + for call in mock_sm_put.mock_calls + if call.kwargs.get("local_path") + ] diff --git a/tests/streamlit/test_commands.py b/tests/streamlit/test_commands.py index 68464053bd..c6ab40afc0 100644 --- a/tests/streamlit/test_commands.py +++ b/tests/streamlit/test_commands.py @@ -55,9 +55,9 @@ def test_describe_streamlit(mock_connector, runner, mock_ctx): ] -def _put_query(source: str, dest: str): +def _put_query(project_root: Path, source: str, dest: str): return dedent( - f"put file://{Path(source)} {dest} auto_compress=false parallel=4 overwrite=True" + f"put file://{project_root.resolve() / 'output' / source} {dest} auto_compress=false parallel=4 overwrite=True" ) @@ -91,16 +91,18 @@ def test_deploy_only_streamlit_file( mock_connector.return_value = ctx mock_get_account.return_value = "my_account" - with project_directory("example_streamlit") as pdir: - (pdir / "environment.yml").unlink() - shutil.rmtree(pdir / "pages") + with project_directory("example_streamlit") as tmp_dir: + (tmp_dir / "environment.yml").unlink() + shutil.rmtree(tmp_dir / "pages") result = runner.invoke(["streamlit", "deploy"]) assert result.exit_code == 0, result.output assert ctx.get_queries() == [ "create stage if not exists IDENTIFIER('MockDatabase.MockSchema.streamlit')", _put_query( - "streamlit_app.py", "@MockDatabase.MockSchema.streamlit/test_streamlit" + tmp_dir, + "streamlit_app.py", + "@MockDatabase.MockSchema.streamlit/test_streamlit", ), dedent( f""" @@ -146,16 +148,18 @@ def test_deploy_only_streamlit_file_no_stage( mock_connector.return_value = ctx mock_get_account.return_value = "my_account" - with project_directory("example_streamlit_no_stage") as pdir: - (pdir / "environment.yml").unlink() - shutil.rmtree(pdir / "pages") + with project_directory("example_streamlit_no_stage") as tmp_dir: + (tmp_dir / "environment.yml").unlink() + shutil.rmtree(tmp_dir / "pages") result = runner.invoke(["streamlit", "deploy"]) assert result.exit_code == 0, result.output assert ctx.get_queries() == [ "create stage if not exists IDENTIFIER('MockDatabase.MockSchema.streamlit')", _put_query( - "streamlit_app.py", "@MockDatabase.MockSchema.streamlit/test_streamlit" + tmp_dir, + "streamlit_app.py", + "@MockDatabase.MockSchema.streamlit/test_streamlit", ), dedent( f""" @@ -200,18 +204,22 @@ def test_deploy_with_empty_pages( mock_connector.return_value = ctx mock_get_account.return_value = "my_account" - with project_directory("streamlit_empty_pages") as directory: - (directory / "pages").mkdir(parents=True, exist_ok=True) + with project_directory("streamlit_empty_pages") as tmp_dir: + (tmp_dir / "pages").mkdir(parents=True, exist_ok=True) result = runner.invoke(["streamlit", "deploy"]) assert result.exit_code == 0, result.output assert ctx.get_queries() == [ "create stage if not exists IDENTIFIER('MockDatabase.MockSchema.streamlit')", _put_query( - "streamlit_app.py", "@MockDatabase.MockSchema.streamlit/test_streamlit" + tmp_dir, + "streamlit_app.py", + "@MockDatabase.MockSchema.streamlit/test_streamlit", ), _put_query( - "environment.yml", "@MockDatabase.MockSchema.streamlit/test_streamlit" + tmp_dir, + "environment.yml", + "@MockDatabase.MockSchema.streamlit/test_streamlit", ), dedent( f""" @@ -223,7 +231,6 @@ def test_deploy_with_empty_pages( ).strip(), "select system$get_snowsight_host()", ] - assert "Skipping empty directory: pages" in result.output @mock.patch("snowflake.cli._plugins.connection.util.get_account") @@ -256,16 +263,18 @@ def test_deploy_only_streamlit_file_replace( 
mock_connector.return_value = ctx mock_get_account.return_value = "my_account" - with project_directory("example_streamlit") as pdir: - (pdir / "environment.yml").unlink() - shutil.rmtree(pdir / "pages") + with project_directory("example_streamlit") as tmp_dir: + (tmp_dir / "environment.yml").unlink() + shutil.rmtree(tmp_dir / "pages") result = runner.invoke(["streamlit", "deploy", "--replace"]) assert result.exit_code == 0, result.output assert ctx.get_queries() == [ "create stage if not exists IDENTIFIER('MockDatabase.MockSchema.streamlit')", _put_query( - "streamlit_app.py", "@MockDatabase.MockSchema.streamlit/test_streamlit" + tmp_dir, + "streamlit_app.py", + "@MockDatabase.MockSchema.streamlit/test_streamlit", ), dedent( f""" @@ -281,23 +290,6 @@ def test_deploy_only_streamlit_file_replace( mock_typer.launch.assert_not_called() -def test_artifacts_must_exists( - runner, mock_ctx, project_directory, alter_snowflake_yml, snapshot -): - with project_directory("example_streamlit_v2") as pdir: - alter_snowflake_yml( - pdir / "snowflake.yml", - parameter_path="entities.my_streamlit.artifacts.1", - value="foo_bar.py", - ) - - result = runner.invoke( - ["streamlit", "deploy"], - ) - assert result.exit_code == 1 - assert result.output == snapshot - - @mock.patch("snowflake.cli._plugins.streamlit.commands.typer") @mock.patch("snowflake.connector.connect") @mock.patch( @@ -354,8 +346,8 @@ def test_deploy_streamlit_and_environment_files( ) mock_connector.return_value = ctx - with project_directory("example_streamlit") as pdir: - shutil.rmtree(pdir / "pages") + with project_directory("example_streamlit") as tmp_dir: + shutil.rmtree(tmp_dir / "pages") result = runner.invoke(["streamlit", "deploy"]) @@ -363,8 +355,8 @@ def test_deploy_streamlit_and_environment_files( assert result.exit_code == 0, result.output assert ctx.get_queries() == [ "create stage if not exists IDENTIFIER('MockDatabase.MockSchema.streamlit')", - _put_query("streamlit_app.py", root_path), - _put_query("environment.yml", root_path), + _put_query(tmp_dir, "streamlit_app.py", root_path), + _put_query(tmp_dir, "environment.yml", root_path), dedent( f""" CREATE STREAMLIT IDENTIFIER('MockDatabase.MockSchema.{STREAMLIT_NAME}') @@ -399,16 +391,16 @@ def test_deploy_streamlit_and_pages_files( ) mock_connector.return_value = ctx - with project_directory("example_streamlit") as pdir: - (pdir / "environment.yml").unlink() + with project_directory("example_streamlit") as tmp_dir: + (tmp_dir / "environment.yml").unlink() result = runner.invoke(["streamlit", "deploy"]) root_path = f"@MockDatabase.MockSchema.streamlit/{STREAMLIT_NAME}" assert result.exit_code == 0, result.output assert ctx.get_queries() == [ "create stage if not exists IDENTIFIER('MockDatabase.MockSchema.streamlit')", - _put_query("streamlit_app.py", root_path), - _put_query("pages/*", f"{root_path}/pages"), + _put_query(tmp_dir, "streamlit_app.py", root_path), + _put_query(tmp_dir, "pages/my_page.py", f"{root_path}/pages"), dedent( f""" CREATE STREAMLIT IDENTIFIER('MockDatabase.MockSchema.{STREAMLIT_NAME}') @@ -443,18 +435,18 @@ def test_deploy_all_streamlit_files( ) mock_connector.return_value = ctx - with project_directory("streamlit_full_definition"): + with project_directory("streamlit_full_definition") as tmp_dir: result = runner.invoke(["streamlit", "deploy"]) root_path = f"@MockDatabase.MockSchema.streamlit/{STREAMLIT_NAME}" assert result.exit_code == 0, result.output assert ctx.get_queries() == [ "create stage if not exists 
IDENTIFIER('MockDatabase.MockSchema.streamlit')", - _put_query("streamlit_app.py", root_path), - _put_query("environment.yml", root_path), - _put_query("pages/*", f"{root_path}/pages"), - _put_query("utils/utils.py", f"{root_path}/utils"), - _put_query("extra_file.py", root_path), + _put_query(tmp_dir, "streamlit_app.py", root_path), + _put_query(tmp_dir, "environment.yml", root_path), + _put_query(tmp_dir, "pages/my_page.py", f"{root_path}/pages"), + _put_query(tmp_dir, "utils/utils.py", f"{root_path}/utils"), + _put_query(tmp_dir, "extra_file.py", root_path), dedent( f""" CREATE STREAMLIT IDENTIFIER('MockDatabase.MockSchema.{STREAMLIT_NAME}') @@ -491,16 +483,16 @@ def test_deploy_put_files_on_stage( with project_directory( "example_streamlit", merge_project_definition={"streamlit": {"stage": "streamlit_stage"}}, - ): + ) as tmp_dir: result = runner.invoke(["streamlit", "deploy"]) root_path = f"@MockDatabase.MockSchema.streamlit_stage/{STREAMLIT_NAME}" assert result.exit_code == 0, result.output assert ctx.get_queries() == [ "create stage if not exists IDENTIFIER('MockDatabase.MockSchema.streamlit_stage')", - _put_query("streamlit_app.py", root_path), - _put_query("environment.yml", root_path), - _put_query("pages/*", f"{root_path}/pages"), + _put_query(tmp_dir, "streamlit_app.py", root_path), + _put_query(tmp_dir, "environment.yml", root_path), + _put_query(tmp_dir, "pages/my_page.py", f"{root_path}/pages"), dedent( f""" CREATE STREAMLIT IDENTIFIER('MockDatabase.MockSchema.{STREAMLIT_NAME}') @@ -535,16 +527,18 @@ def test_deploy_all_streamlit_files_not_defaults( ) mock_connector.return_value = ctx - with project_directory("example_streamlit_no_defaults"): + with project_directory("example_streamlit_no_defaults") as tmp_dir: result = runner.invoke(["streamlit", "deploy"]) root_path = f"@MockDatabase.MockSchema.streamlit_stage/{STREAMLIT_NAME}" assert result.exit_code == 0, result.output assert ctx.get_queries() == [ "create stage if not exists IDENTIFIER('MockDatabase.MockSchema.streamlit_stage')", - _put_query("main.py", root_path), - _put_query("streamlit_environment.yml", root_path), - _put_query("streamlit_pages/*", f"{root_path}/streamlit_pages"), + _put_query(tmp_dir, "main.py", root_path), + _put_query(tmp_dir, "streamlit_environment.yml", root_path), + _put_query( + tmp_dir, "streamlit_pages/first_page.py", f"{root_path}/streamlit_pages" + ), dedent( f""" CREATE STREAMLIT IDENTIFIER('MockDatabase.MockSchema.{STREAMLIT_NAME}') @@ -597,7 +591,7 @@ def test_deploy_streamlit_main_and_pages_files_experimental( return_value=enable_streamlit_no_checkouts, ), ): - with project_directory("example_streamlit"): + with project_directory("example_streamlit") as tmp_dir: result = runner.invoke(["streamlit", "deploy", "--experimental"]) if enable_streamlit_versioned_stage: @@ -627,9 +621,9 @@ def test_deploy_streamlit_main_and_pages_files_experimental( """ ).strip(), post_create_command, - _put_query("streamlit_app.py", root_path), - _put_query("environment.yml", f"{root_path}"), - _put_query("pages/*", f"{root_path}/pages"), + _put_query(tmp_dir, "streamlit_app.py", root_path), + _put_query(tmp_dir, "environment.yml", f"{root_path}"), + _put_query(tmp_dir, "pages/my_page.py", f"{root_path}/pages"), "select system$get_snowsight_host()", "select current_account_name()", ] @@ -678,13 +672,11 @@ def test_deploy_streamlit_main_and_pages_files_experimental_double_deploy( ) ctx.queries = [] - with project_directory("example_streamlit"): + with project_directory("example_streamlit") as tmp_dir: result2 
= runner.invoke(["streamlit", "deploy", "--experimental"]) - assert result2.exit_code == 0, result2.output - root_path = f"@streamlit/MockDatabase.MockSchema.{STREAMLIT_NAME}/default_checkout" - + assert result2.exit_code == 0, result2.output # Same as normal, except no ALTER query assert ctx.get_queries() == [ dedent( @@ -695,9 +687,9 @@ def test_deploy_streamlit_main_and_pages_files_experimental_double_deploy( TITLE = 'My Fancy Streamlit' """ ).strip(), - _put_query("streamlit_app.py", root_path), - _put_query("environment.yml", f"{root_path}"), - _put_query("pages/*", f"{root_path}/pages"), + _put_query(tmp_dir, "streamlit_app.py", root_path), + _put_query(tmp_dir, "environment.yml", f"{root_path}"), + _put_query(tmp_dir, "pages/my_page.py", f"{root_path}/pages"), "select system$get_snowsight_host()", "select current_account_name()", ] @@ -734,7 +726,7 @@ def test_deploy_streamlit_main_and_pages_files_experimental_no_stage( "snowflake.cli.api.feature_flags.FeatureFlag.ENABLE_STREAMLIT_VERSIONED_STAGE.is_enabled", return_value=enable_streamlit_versioned_stage, ): - with project_directory("example_streamlit_no_stage"): + with project_directory("example_streamlit_no_stage") as tmp_dir: result = runner.invoke(["streamlit", "deploy", "--experimental"]) if enable_streamlit_versioned_stage: @@ -758,9 +750,9 @@ def test_deploy_streamlit_main_and_pages_files_experimental_no_stage( """ ).strip(), post_create_command, - _put_query("streamlit_app.py", root_path), - _put_query("environment.yml", f"{root_path}"), - _put_query("pages/*", f"{root_path}/pages"), + _put_query(tmp_dir, "streamlit_app.py", root_path), + _put_query(tmp_dir, "environment.yml", f"{root_path}"), + _put_query(tmp_dir, "pages/my_page.py", f"{root_path}/pages"), f"select system$get_snowsight_host()", f"select current_account_name()", ] @@ -786,7 +778,7 @@ def test_deploy_streamlit_main_and_pages_files_experimental_replace( ) mock_connector.return_value = ctx - with project_directory("example_streamlit"): + with project_directory("example_streamlit") as tmp_dir: result = runner.invoke(["streamlit", "deploy", "--experimental", "--replace"]) root_path = f"@streamlit/MockDatabase.MockSchema.{STREAMLIT_NAME}/default_checkout" @@ -801,9 +793,9 @@ def test_deploy_streamlit_main_and_pages_files_experimental_replace( """ ).strip(), f"ALTER streamlit MockDatabase.MockSchema.{STREAMLIT_NAME} CHECKOUT", - _put_query("streamlit_app.py", root_path), - _put_query("environment.yml", f"{root_path}"), - _put_query("pages/*", f"{root_path}/pages"), + _put_query(tmp_dir, "streamlit_app.py", root_path), + _put_query(tmp_dir, "environment.yml", f"{root_path}"), + _put_query(tmp_dir, "pages/my_page.py", f"{root_path}/pages"), f"select system$get_snowsight_host()", f"select current_account_name()", ] @@ -960,7 +952,7 @@ def test_deploy_streamlit_with_comment_v2( ) mock_connector.return_value = ctx - with project_directory("example_streamlit_with_comment_v2"): + with project_directory("example_streamlit_with_comment_v2") as tmp_dir: result = runner.invoke(["streamlit", "deploy", "--replace"]) root_path = f"@MockDatabase.MockSchema.streamlit/test_streamlit_deploy_snowcli" @@ -968,9 +960,9 @@ def test_deploy_streamlit_with_comment_v2( assert ctx.get_queries() == [ f"describe streamlit IDENTIFIER('MockDatabase.MockSchema.test_streamlit_deploy_snowcli')", "create stage if not exists IDENTIFIER('MockDatabase.MockSchema.streamlit')", - _put_query("streamlit_app.py", root_path), - _put_query("pages/*", f"{root_path}/pages"), - _put_query("environment.yml", 
root_path), + _put_query(tmp_dir, "streamlit_app.py", root_path), + _put_query(tmp_dir, "pages/my_page.py", f"{root_path}/pages"), + _put_query(tmp_dir, "environment.yml", root_path), dedent( f""" CREATE OR REPLACE STREAMLIT IDENTIFIER('MockDatabase.MockSchema.test_streamlit_deploy_snowcli') diff --git a/tests/streamlit/test_streamlit_manager.py b/tests/streamlit/test_streamlit_manager.py index 4f04fe854c..6197d6c58b 100644 --- a/tests/streamlit/test_streamlit_manager.py +++ b/tests/streamlit/test_streamlit_manager.py @@ -7,6 +7,9 @@ from snowflake.cli._plugins.streamlit.streamlit_entity_model import ( StreamlitEntityModel, ) +from snowflake.cli._plugins.streamlit.streamlit_project_paths import ( + StreamlitProjectPaths, +) from snowflake.cli.api.identifiers import FQN mock_streamlit_exists = mock.patch( @@ -22,7 +25,7 @@ def test_deploy_streamlit(mock_execute_query, _, mock_stage_manager, temp_dir): mock_stage_manager().get_standard_stage_prefix.return_value = "stage_root" - main_file = Path(temp_dir) / "main.py" + main_file = Path("main.py") main_file.touch() st = StreamlitEntityModel( @@ -33,11 +36,13 @@ def test_deploy_streamlit(mock_execute_query, _, mock_stage_manager, temp_dir): main_file=str(main_file), imports=["@stage/foo.py", "@stage/bar.py"], # Possibly can be PathMapping - artifacts=[main_file], + artifacts=[str(main_file)], ) + streamlit_project_paths = StreamlitProjectPaths(Path().absolute()) + StreamlitManager(MagicMock(database="DB", schema="SH")).deploy( - streamlit=st, replace=False + streamlit=st, streamlit_project_paths=streamlit_project_paths, replace=False ) mock_execute_query.assert_called_once_with( @@ -62,7 +67,7 @@ def test_deploy_streamlit_with_api_integrations( ): mock_stage_manager().get_standard_stage_prefix.return_value = "stage_root" - main_file = Path(temp_dir) / "main.py" + main_file = Path("main.py") main_file.touch() st = StreamlitEntityModel( @@ -72,13 +77,15 @@ def test_deploy_streamlit_with_api_integrations( query_warehouse="My_WH", main_file=str(main_file), # Possibly can be PathMapping - artifacts=[main_file], + artifacts=[str(main_file)], external_access_integrations=["MY_INTERGATION", "OTHER"], secrets={"my_secret": "SecretOfTheSecrets", "other": "other_secret"}, ) + streamlit_project_paths = StreamlitProjectPaths(Path().absolute()) + StreamlitManager(MagicMock(database="DB", schema="SH")).deploy( - streamlit=st, replace=False + streamlit=st, streamlit_project_paths=streamlit_project_paths, replace=False ) mock_execute_query.assert_called_once_with( @@ -104,7 +111,7 @@ def test_deploy_streamlit_with_comment( ): mock_stage_manager().get_standard_stage_prefix.return_value = "stage_root" - main_file = Path(temp_dir) / "main.py" + main_file = Path("main.py") main_file.touch() st = StreamlitEntityModel( @@ -113,12 +120,14 @@ def test_deploy_streamlit_with_comment( title="MyStreamlit", query_warehouse="My_WH", main_file=str(main_file), - artifacts=[main_file], + artifacts=[str(main_file)], comment="This is a test comment", ) + streamlit_project_paths = StreamlitProjectPaths(Path().absolute()) + StreamlitManager(MagicMock(database="DB", schema="SH")).deploy( - streamlit=st, replace=False + streamlit=st, streamlit_project_paths=streamlit_project_paths, replace=False ) mock_execute_query.assert_called_once_with( diff --git a/tests/test_data/projects/glob_patterns/environment.yml b/tests/test_data/projects/glob_patterns/environment.yml new file mode 100644 index 0000000000..ac8feac3e8 --- /dev/null +++ 
b/tests/test_data/projects/glob_patterns/environment.yml @@ -0,0 +1,5 @@ +name: sf_env +channels: + - snowflake +dependencies: + - pandas diff --git a/tests/test_data/projects/glob_patterns/main.py b/tests/test_data/projects/glob_patterns/main.py new file mode 100644 index 0000000000..52c7b0751f --- /dev/null +++ b/tests/test_data/projects/glob_patterns/main.py @@ -0,0 +1,18 @@ +from __future__ import annotations + +import sys + +from procedures import hello_procedure +from snowflake.snowpark import Session + +# For local debugging. Be aware you may need to type-convert arguments if +# you add input parameters +if __name__ == "__main__": + from snowflake.cli.api.config import cli_config + + session = Session.builder.configs(cli_config.get_connection_dict("dev")).create() + if len(sys.argv) > 1: + print(hello_procedure(session, *sys.argv[1:])) # type: ignore + else: + print(hello_procedure(session)) # type: ignore + session.close() diff --git a/tests/test_data/projects/glob_patterns/pages/my_page.py b/tests/test_data/projects/glob_patterns/pages/my_page.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/test_data/projects/glob_patterns/snowflake.yml b/tests/test_data/projects/glob_patterns/snowflake.yml new file mode 100644 index 0000000000..38c39e7566 --- /dev/null +++ b/tests/test_data/projects/glob_patterns/snowflake.yml @@ -0,0 +1,23 @@ +definition_version: 2 +entities: + hello_procedure: + artifacts: + - # set in test + handler: hello + identifier: + name: hello_procedure + returns: string + signature: + - name: "name" + type: "string" + stage: dev_deployment + type: procedure + my_streamlit: + type: "streamlit" + identifier: test_streamlit_deploy_snowcli + title: "My Fancy Streamlit" + stage: streamlit + query_warehouse: xsmall + main_file: streamlit_app.py + artifacts: + - # set in test diff --git a/tests/test_data/projects/glob_patterns/src/app.py b/tests/test_data/projects/glob_patterns/src/app.py new file mode 100644 index 0000000000..6dac2047d2 --- /dev/null +++ b/tests/test_data/projects/glob_patterns/src/app.py @@ -0,0 +1,12 @@ +from __future__ import annotations + +from dir.dir_app import print_hello +from snowflake.snowpark import Session + + +def hello_procedure(session: Session, name: str) -> str: + return print_hello(name) + + +def hello_function(name: str) -> str: + return print_hello(name) diff --git a/tests/test_data/projects/glob_patterns/src/dir/dir_app.py b/tests/test_data/projects/glob_patterns/src/dir/dir_app.py new file mode 100644 index 0000000000..055739db8d --- /dev/null +++ b/tests/test_data/projects/glob_patterns/src/dir/dir_app.py @@ -0,0 +1,2 @@ +def print_hello(name: str): + print(f"Hello, {name}!") diff --git a/tests/test_data/projects/glob_patterns/streamlit_app.py b/tests/test_data/projects/glob_patterns/streamlit_app.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/test_data/projects/glob_patterns_zip/commons/helpers.py b/tests/test_data/projects/glob_patterns_zip/commons/helpers.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/test_data/projects/glob_patterns_zip/environment.yml b/tests/test_data/projects/glob_patterns_zip/environment.yml new file mode 100644 index 0000000000..ac8feac3e8 --- /dev/null +++ b/tests/test_data/projects/glob_patterns_zip/environment.yml @@ -0,0 +1,5 @@ +name: sf_env +channels: + - snowflake +dependencies: + - pandas diff --git a/tests/test_data/projects/glob_patterns_zip/main.py b/tests/test_data/projects/glob_patterns_zip/main.py new file mode 100644 index 
0000000000..52c7b0751f --- /dev/null +++ b/tests/test_data/projects/glob_patterns_zip/main.py @@ -0,0 +1,18 @@ +from __future__ import annotations + +import sys + +from procedures import hello_procedure +from snowflake.snowpark import Session + +# For local debugging. Be aware you may need to type-convert arguments if +# you add input parameters +if __name__ == "__main__": + from snowflake.cli.api.config import cli_config + + session = Session.builder.configs(cli_config.get_connection_dict("dev")).create() + if len(sys.argv) > 1: + print(hello_procedure(session, *sys.argv[1:])) # type: ignore + else: + print(hello_procedure(session)) # type: ignore + session.close() diff --git a/tests/test_data/projects/glob_patterns_zip/snowflake.yml b/tests/test_data/projects/glob_patterns_zip/snowflake.yml new file mode 100644 index 0000000000..38c39e7566 --- /dev/null +++ b/tests/test_data/projects/glob_patterns_zip/snowflake.yml @@ -0,0 +1,23 @@ +definition_version: 2 +entities: + hello_procedure: + artifacts: + - # set in test + handler: hello + identifier: + name: hello_procedure + returns: string + signature: + - name: "name" + type: "string" + stage: dev_deployment + type: procedure + my_streamlit: + type: "streamlit" + identifier: test_streamlit_deploy_snowcli + title: "My Fancy Streamlit" + stage: streamlit + query_warehouse: xsmall + main_file: streamlit_app.py + artifacts: + - # set in test diff --git a/tests/test_data/projects/glob_patterns_zip/src/app.py b/tests/test_data/projects/glob_patterns_zip/src/app.py new file mode 100644 index 0000000000..6dac2047d2 --- /dev/null +++ b/tests/test_data/projects/glob_patterns_zip/src/app.py @@ -0,0 +1,12 @@ +from __future__ import annotations + +from dir.dir_app import print_hello +from snowflake.snowpark import Session + + +def hello_procedure(session: Session, name: str) -> str: + return print_hello(name) + + +def hello_function(name: str) -> str: + return print_hello(name) diff --git a/tests/test_data/projects/glob_patterns_zip/src/dir/dir_app.py b/tests/test_data/projects/glob_patterns_zip/src/dir/dir_app.py new file mode 100644 index 0000000000..055739db8d --- /dev/null +++ b/tests/test_data/projects/glob_patterns_zip/src/dir/dir_app.py @@ -0,0 +1,2 @@ +def print_hello(name: str): + print(f"Hello, {name}!") diff --git a/tests/test_data/projects/glob_patterns_zip/streamlit_app.py b/tests/test_data/projects/glob_patterns_zip/streamlit_app.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/testing_utils/fixtures.py b/tests/testing_utils/fixtures.py index ee731735b1..457acec1c6 100644 --- a/tests/testing_utils/fixtures.py +++ b/tests/testing_utils/fixtures.py @@ -466,3 +466,12 @@ def mock_procedure_description(mock_cursor): "installed_packages", ], ) + + +@pytest.fixture +def enable_snowpark_glob_support_feature_flag(): + with mock.patch( + f"snowflake.cli.api.feature_flags.FeatureFlag.ENABLE_SNOWPARK_GLOB_SUPPORT.is_enabled", + return_value=True, + ): + yield diff --git a/tests_integration/conftest.py b/tests_integration/conftest.py index 02d6fba10c..6be62f1ce5 100644 --- a/tests_integration/conftest.py +++ b/tests_integration/conftest.py @@ -24,6 +24,7 @@ from json import JSONDecodeError from pathlib import Path from typing import Any, Dict, List, Optional +from unittest import mock from uuid import uuid4 import pytest @@ -258,3 +259,12 @@ def resource_suffix(request): # To generate a suffix that isn't too long or complex, we use originalname, which is the # "bare" test function name, without filename, class name, or 
parameterization variables return f"_{uuid4().hex}_{request.node.originalname}" + + +@pytest.fixture +def enable_snowpark_glob_support_feature_flag(): + with mock.patch( + f"snowflake.cli.api.feature_flags.FeatureFlag.ENABLE_SNOWPARK_GLOB_SUPPORT.is_enabled", + return_value=True, + ): + yield diff --git a/tests_integration/nativeapp/test_teardown.py b/tests_integration/nativeapp/test_teardown.py index 44c94a8389..03d11a6e2b 100644 --- a/tests_integration/nativeapp/test_teardown.py +++ b/tests_integration/nativeapp/test_teardown.py @@ -261,6 +261,7 @@ def test_nativeapp_teardown_pkg_versions( assert result.exit_code == 0 +@pytest.mark.integration def test_nativeapp_teardown_multiple_apps_using_snow_app( runner, nativeapp_project_directory, @@ -322,6 +323,7 @@ def test_nativeapp_teardown_multiple_apps_using_snow_app( ) +@pytest.mark.integration def test_nativeapp_teardown_multiple_packages_using_snow_app_must_choose( runner, nativeapp_project_directory, diff --git a/tests_integration/test_data/projects/snowpark_glob_patterns/app_1/a.py b/tests_integration/test_data/projects/snowpark_glob_patterns/app_1/a.py new file mode 100644 index 0000000000..f92e382069 --- /dev/null +++ b/tests_integration/test_data/projects/snowpark_glob_patterns/app_1/a.py @@ -0,0 +1,12 @@ +from __future__ import annotations +from snowflake.snowpark import Session +from b import test_procedure + + +# test import +import syrupy + + +def hello_procedure(session: Session, name: str) -> str: + + return f"Hello {name}" + test_procedure(session) diff --git a/tests_integration/test_data/projects/snowpark_glob_patterns/app_1/b.py b/tests_integration/test_data/projects/snowpark_glob_patterns/app_1/b.py new file mode 100644 index 0000000000..bef124997f --- /dev/null +++ b/tests_integration/test_data/projects/snowpark_glob_patterns/app_1/b.py @@ -0,0 +1,10 @@ +from __future__ import annotations +from snowflake.snowpark import Session + + +# test import +import syrupy + + +def test_procedure(session: Session) -> str: + return "Test procedure" diff --git a/tests_integration/test_data/projects/snowpark_glob_patterns/app_2/c.py b/tests_integration/test_data/projects/snowpark_glob_patterns/app_2/c.py new file mode 100644 index 0000000000..f92e382069 --- /dev/null +++ b/tests_integration/test_data/projects/snowpark_glob_patterns/app_2/c.py @@ -0,0 +1,12 @@ +from __future__ import annotations +from snowflake.snowpark import Session +from b import test_procedure + + +# test import +import syrupy + + +def hello_procedure(session: Session, name: str) -> str: + + return f"Hello {name}" + test_procedure(session) diff --git a/tests_integration/test_data/projects/snowpark_glob_patterns/app_2/d.py b/tests_integration/test_data/projects/snowpark_glob_patterns/app_2/d.py new file mode 100644 index 0000000000..bef124997f --- /dev/null +++ b/tests_integration/test_data/projects/snowpark_glob_patterns/app_2/d.py @@ -0,0 +1,10 @@ +from __future__ import annotations +from snowflake.snowpark import Session + + +# test import +import syrupy + + +def test_procedure(session: Session) -> str: + return "Test procedure" diff --git a/tests_integration/test_data/projects/snowpark_glob_patterns/e.py b/tests_integration/test_data/projects/snowpark_glob_patterns/e.py new file mode 100644 index 0000000000..3ab4a6d6cc --- /dev/null +++ b/tests_integration/test_data/projects/snowpark_glob_patterns/e.py @@ -0,0 +1,9 @@ +from __future__ import annotations + + +# test import +import syrupy + + +def hello_function(name: str) -> str: + return f"Hello {name}!" 
diff --git a/tests_integration/test_data/projects/snowpark_glob_patterns/requirements.txt b/tests_integration/test_data/projects/snowpark_glob_patterns/requirements.txt new file mode 100644 index 0000000000..18af07a40d --- /dev/null +++ b/tests_integration/test_data/projects/snowpark_glob_patterns/requirements.txt @@ -0,0 +1,2 @@ +snowflake-snowpark-python +syrupy \ No newline at end of file diff --git a/tests_integration/test_data/projects/snowpark_glob_patterns/snowflake.yml b/tests_integration/test_data/projects/snowpark_glob_patterns/snowflake.yml new file mode 100644 index 0000000000..1275f19916 --- /dev/null +++ b/tests_integration/test_data/projects/snowpark_glob_patterns/snowflake.yml @@ -0,0 +1,43 @@ +definition_version: 2 + +mixins: + snowpark_shared: + stage: "dev_deployment" + +entities: + hello_procedure: + type: "procedure" + stage: "stage_a" + identifier: + name: "hello_procedure" + handler: "a.hello_procedure" + signature: + - name: "name" + type: "string" + returns: string + artifacts: + - "app_1/*" + + test: + type: "procedure" + handler: "d.test_procedure" + signature: "" + returns: string + artifacts: + - "app_2/*" + meta: + use_mixins: + - "snowpark_shared" + + hello_function: + type: "function" + handler: "e.hello_function" + signature: + - name: "name" + type: "string" + returns: string + artifacts: + - "e.py" + meta: + use_mixins: + - "snowpark_shared" diff --git a/tests_integration/test_object.py b/tests_integration/test_object.py index a7d2a57bee..386150438b 100644 --- a/tests_integration/test_object.py +++ b/tests_integration/test_object.py @@ -349,7 +349,7 @@ def test_create_error_undefined_database(runner): ) -@pytest.mark.int +@pytest.mark.integration def test_object_create_if_not_exist_and_replace(runner, test_database): result = runner.invoke_with_connection( diff --git a/tests_integration/test_snowpark.py b/tests_integration/test_snowpark.py index 7701581f74..c55f33ee90 100644 --- a/tests_integration/test_snowpark.py +++ b/tests_integration/test_snowpark.py @@ -37,6 +37,313 @@ @pytest.mark.integration def test_snowpark_flow( + _test_steps, + project_directory, + alter_snowflake_yml, + test_database, + enable_snowpark_glob_support_feature_flag, +): + database = test_database.upper() + with project_directory("snowpark") as tmp_dir: + _test_steps.snowpark_build_should_zip_files( + additional_files=[ + Path("output"), + Path("output") / "my_snowpark_project", + Path("output") / "my_snowpark_project" / "app.zip", + ] + ) + + _test_steps.snowpark_deploy_should_finish_successfully_and_return( + [ + { + "object": f"{database}.PUBLIC.hello_procedure(name string)", + "status": "created", + "type": "procedure", + }, + { + "object": f"{database}.PUBLIC.test()", + "status": "created", + "type": "procedure", + }, + { + "object": f"{database}.PUBLIC.hello_function(name string)", + "status": "created", + "type": "function", + }, + ] + ) + + _test_steps.assert_those_procedures_are_in_snowflake( + "hello_procedure(VARCHAR) RETURN VARCHAR" + ) + _test_steps.assert_those_functions_are_in_snowflake( + "hello_function(VARCHAR) RETURN VARCHAR" + ) + + expected_files = [ + f"{STAGE_NAME}/my_snowpark_project/app.zip", + f"{STAGE_NAME}/dependencies.zip", + ] + _test_steps.assert_that_only_these_files_are_staged_in_test_db( + *expected_files, stage_name=STAGE_NAME + ) + + # Listing procedures or functions shows created objects + _test_steps.object_show_includes_given_identifiers( + object_type="procedure", + identifier=("hello_procedure", "(VARCHAR) RETURN VARCHAR"), + ) + 
_test_steps.object_show_includes_given_identifiers( + object_type="function", + identifier=("hello_function", "(VARCHAR) RETURN VARCHAR"), + ) + + # Created objects can be described + _test_steps.object_describe_should_return_entity_description( + object_type="procedure", + identifier="hello_procedure(VARCHAR)", + signature="(NAME VARCHAR)", + returns=RETURN_TYPE, + ) + + _test_steps.object_describe_should_return_entity_description( + object_type="function", + identifier="hello_function(VARCHAR)", + signature="(NAME VARCHAR)", + returns=RETURN_TYPE, + ) + + # Grants are given correctly + + _test_steps.set_grants_on_selected_object( + object_type="procedure", + object_name="hello_procedure(VARCHAR)", + privillege="USAGE", + role="test_role", + ) + + _test_steps.set_grants_on_selected_object( + object_type="function", + object_name="hello_function(VARCHAR)", + privillege="USAGE", + role="test_role", + ) + + _test_steps.assert_that_object_has_expected_grant( + object_type="procedure", + object_name="hello_procedure(VARCHAR)", + expected_privillege="USAGE", + expected_role="test_role", + ) + + _test_steps.assert_that_object_has_expected_grant( + object_type="function", + object_name="hello_function(VARCHAR)", + expected_privillege="USAGE", + expected_role="test_role", + ) + + # Created objects can be executed + _test_steps.snowpark_execute_should_return_expected_value( + object_type="procedure", + identifier="hello_procedure('foo')", + expected_value="Hello foo", + ) + + _test_steps.snowpark_execute_should_return_expected_value( + object_type="function", + identifier="hello_function('foo')", + expected_value="Hello foo!", + ) + + # Subsequent deploy of same object should fail + _test_steps.snowpark_deploy_should_return_error_with_message_contains( + "Following objects already exists" + ) + + # Apply changes to project objects + alter_snowflake_yml( + tmp_dir / "snowflake.yml", + parameter_path="snowpark.procedures.0.returns", + value="variant", + ) + alter_snowflake_yml( + tmp_dir / "snowflake.yml", + parameter_path="snowpark.functions.0.returns", + value="variant", + ) + + # Now we deploy with replace flag, it should update existing objects + _test_steps.snowpark_deploy_should_finish_successfully_and_return( + additional_arguments=["--replace"], + expected_result=[ + { + "object": f"{database}.PUBLIC.hello_procedure(name string)", + "status": "definition updated", + "type": "procedure", + }, + { + "object": f"{database}.PUBLIC.test()", + "status": "packages updated", + "type": "procedure", + }, + { + "object": f"{database}.PUBLIC.hello_function(name string)", + "status": "definition updated", + "type": "function", + }, + ], + ) + + # Apply another changes to project objects + alter_snowflake_yml( + tmp_dir / "snowflake.yml", + parameter_path="snowpark.procedures.0.execute_as_caller", + value="true", + ) + alter_snowflake_yml( + tmp_dir / "snowflake.yml", + parameter_path="snowpark.functions.0.runtime", + value="3.11", + ) + + # Another deploy with replace flag, it should update existing objects + _test_steps.snowpark_deploy_should_finish_successfully_and_return( + additional_arguments=["--replace"], + expected_result=[ + { + "object": f"{database}.PUBLIC.hello_procedure(name string)", + "status": "definition updated", + "type": "procedure", + }, + { + "object": f"{database}.PUBLIC.test()", + "status": "packages updated", + "type": "procedure", + }, + { + "object": f"{database}.PUBLIC.hello_function(name string)", + "status": "definition updated", + "type": "function", + }, + ], + ) + + # 
Check if objects were updated + _test_steps.assert_those_procedures_are_in_snowflake( + "hello_procedure(VARCHAR) RETURN VARIANT" + ) + _test_steps.assert_those_functions_are_in_snowflake( + "hello_function(VARCHAR) RETURN VARIANT" + ) + + _test_steps.assert_that_only_these_files_are_staged_in_test_db( + *expected_files, stage_name=STAGE_NAME + ) + + # Listing procedures or functions shows updated objects + _test_steps.object_show_includes_given_identifiers( + object_type="procedure", + identifier=("hello_procedure", "(VARCHAR) RETURN VARIANT"), + ) + _test_steps.object_show_includes_given_identifiers( + object_type="function", + identifier=("hello_function", "(VARCHAR) RETURN VARIANT"), + ) + + # Updated objects can be executed + _test_steps.snowpark_execute_should_return_expected_value( + object_type="procedure", + identifier="hello_procedure('foo')", + expected_value='"Hello foo"', + ) + + _test_steps.snowpark_execute_should_return_expected_value( + object_type="function", + identifier="hello_function('foo')", + expected_value='"Hello foo!"', + ) + + # Check if adding import triggers replace + _test_steps.package_should_build_proper_artifact( + "dummy_pkg_for_tests", "dummy_pkg_for_tests/shrubbery.py" + ) + _test_steps.package_should_upload_artifact_to_stage( + "dummy_pkg_for_tests.zip", STAGE_NAME + ) + + alter_snowflake_yml( + tmp_dir / "snowflake.yml", + parameter_path="snowpark.functions.0.imports", + value=["@dev_deployment/dummy_pkg_for_tests.zip"], + ) + + _test_steps.snowpark_deploy_should_finish_successfully_and_return( + additional_arguments=["--replace"], + expected_result=[ + { + "object": f"{database}.PUBLIC.hello_procedure(name string)", + "status": "packages updated", + "type": "procedure", + }, + { + "object": f"{database}.PUBLIC.test()", + "status": "packages updated", + "type": "procedure", + }, + { + "object": f"{database}.PUBLIC.hello_function(name string)", + "status": "definition updated", + "type": "function", + }, + ], + ) + + # Same file should be present, with addition of uploaded package + expected_files.append(f"{STAGE_NAME}/dummy_pkg_for_tests.zip") + + _test_steps.assert_that_only_these_files_are_staged_in_test_db( + *expected_files, stage_name=STAGE_NAME + ) + + # Grants are preserved after updates + + _test_steps.assert_that_object_has_expected_grant( + object_type="procedure", + object_name="hello_procedure(VARCHAR)", + expected_privillege="USAGE", + expected_role="test_role", + ) + + _test_steps.assert_that_object_has_expected_grant( + object_type="function", + object_name="hello_function(VARCHAR)", + expected_privillege="USAGE", + expected_role="test_role", + ) + + # Check if objects can be dropped + _test_steps.object_drop_should_finish_successfully( + object_type="procedure", identifier="hello_procedure(varchar)" + ) + _test_steps.object_drop_should_finish_successfully( + object_type="function", identifier="hello_function(varchar)" + ) + + _test_steps.object_show_should_return_no_data( + object_type="function", object_prefix="hello" + ) + _test_steps.object_show_should_return_no_data( + object_type="procedure", object_prefix="hello" + ) + + _test_steps.assert_that_only_these_files_are_staged_in_test_db( + *expected_files, stage_name=STAGE_NAME + ) + + +@pytest.mark.integration +def test_snowpark_flow_old_build( _test_steps, project_directory, alter_snowflake_yml, test_database ): database = test_database.upper() @@ -930,6 +1237,84 @@ def test_snowpark_aliases(project_directory, runner, _test_steps, test_database) @pytest.mark.integration def 
test_snowpark_flow_v2( + _test_steps, + project_directory, + alter_snowflake_yml, + test_database, + enable_snowpark_glob_support_feature_flag, +): + database = test_database.upper() + with project_directory("snowpark_v2") as tmp_dir: + _test_steps.snowpark_build_should_zip_files( + additional_files=[ + Path("output"), + Path("output") / "app_1.zip", + Path("output") / "app_2.zip", + Path("output") / "c.py", + ] + ) + _test_steps.snowpark_deploy_should_finish_successfully_and_return( + [ + { + "object": f"{database}.PUBLIC.hello_procedure(name string)", + "status": "created", + "type": "procedure", + }, + { + "object": f"{database}.PUBLIC.test()", + "status": "created", + "type": "procedure", + }, + { + "object": f"{database}.PUBLIC.hello_function(name string)", + "status": "created", + "type": "function", + }, + ] + ) + + _test_steps.assert_those_procedures_are_in_snowflake( + "hello_procedure(VARCHAR) RETURN VARCHAR" + ) + _test_steps.assert_those_functions_are_in_snowflake( + "hello_function(VARCHAR) RETURN VARCHAR" + ) + + _test_steps.assert_that_only_these_files_are_staged_in_test_db( + "stage_a/app_1.zip", + "stage_a/dependencies.zip", + stage_name="stage_a", + ) + + _test_steps.assert_that_only_these_files_are_staged_in_test_db( + f"{STAGE_NAME}/app_2.zip", + f"{STAGE_NAME}/c.py", + f"{STAGE_NAME}/dependencies.zip", + stage_name=STAGE_NAME, + ) + + # Created objects can be executed + _test_steps.snowpark_execute_should_return_expected_value( + object_type="procedure", + identifier="hello_procedure('foo')", + expected_value="Hello foo", + ) + + _test_steps.snowpark_execute_should_return_expected_value( + object_type="procedure", + identifier="test()", + expected_value="Test procedure", + ) + + _test_steps.snowpark_execute_should_return_expected_value( + object_type="function", + identifier="hello_function('foo')", + expected_value="Hello foo!", + ) + + +@pytest.mark.integration +def test_snowpark_flow_v2_old_build( _test_steps, project_directory, alter_snowflake_yml, test_database ): database = test_database.upper() @@ -997,6 +1382,50 @@ def test_snowpark_flow_v2( ) +@pytest.mark.integration +def test_snowpark_with_glob_patterns( + _test_steps, + project_directory, + alter_snowflake_yml, + test_database, + enable_snowpark_glob_support_feature_flag, +): + database = test_database.upper() + with project_directory("snowpark_glob_patterns"): + _test_steps.snowpark_build_should_zip_files( + additional_files=[ + Path("output"), + Path("output") / "app_1.zip", + Path("output") / "app_2.zip", + Path("output") / "e.py", + ] + ) + _test_steps.snowpark_deploy_should_finish_successfully_and_return( + [ + { + "object": f"{database}.PUBLIC.hello_procedure(name string)", + "status": "created", + "type": "procedure", + }, + { + "object": f"{database}.PUBLIC.test()", + "status": "created", + "type": "procedure", + }, + { + "object": f"{database}.PUBLIC.hello_function(name string)", + "status": "created", + "type": "function", + }, + ] + ) + _test_steps.snowpark_execute_should_return_expected_value( + object_type="procedure", + identifier="hello_procedure('foo')", + expected_value="Hello foo" + "Test procedure", + ) + + @pytest.fixture def _test_setup( runner, diff --git a/tests_integration/testing_utils/snowpark_utils.py b/tests_integration/testing_utils/snowpark_utils.py index 9551dd5a78..33412f2d0f 100644 --- a/tests_integration/testing_utils/snowpark_utils.py +++ b/tests_integration/testing_utils/snowpark_utils.py @@ -24,6 +24,7 @@ from syrupy import SnapshotAssertion +from 
snowflake.cli.api.feature_flags import FeatureFlag from tests_integration.conftest import SnowCLIRunner from tests_integration.testing_utils import assert_that_result_is_error from tests_integration.testing_utils.assertions.test_file_assertions import ( @@ -170,7 +171,10 @@ def snowpark_build_should_zip_files( additional_files = [] if not no_dependencies: - additional_files.append(Path("dependencies.zip")) + if FeatureFlag.ENABLE_SNOWPARK_GLOB_SUPPORT.is_enabled(): + additional_files.append(Path("output") / "dependencies.zip") + else: + additional_files.append(Path("dependencies.zip")) current_files = set(Path(".").glob("**/*")) result = self._setup.runner.invoke_with_connection_json(